Nov 24 01:12:54 crc systemd[1]: Starting Kubernetes Kubelet... Nov 24 01:12:54 crc restorecon[4672]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c225,c458 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c24 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c574,c582 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c138,c778 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 
Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c108,c511 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c12,c18 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 24 01:12:54 crc 
restorecon[4672]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 24 01:12:54 crc 
restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc 
restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc 
restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 24 01:12:54 
crc restorecon[4672]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 
01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 
01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 01:12:54 crc 
restorecon[4672]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 
01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:54 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 
01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc 
restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 01:12:55 crc restorecon[4672]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 24 01:12:55 crc restorecon[4672]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 24 01:12:55 crc restorecon[4672]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Nov 24 01:12:55 crc kubenswrapper[4755]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Nov 24 01:12:55 crc kubenswrapper[4755]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Nov 24 01:12:55 crc kubenswrapper[4755]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Nov 24 01:12:55 crc kubenswrapper[4755]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Nov 24 01:12:55 crc kubenswrapper[4755]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Nov 24 01:12:55 crc kubenswrapper[4755]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.764100 4755 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.768873 4755 feature_gate.go:330] unrecognized feature gate: NewOLM Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.768891 4755 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.768895 4755 feature_gate.go:330] unrecognized feature gate: SignatureStores Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.768899 4755 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.768904 4755 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.768907 4755 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.768913 4755 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.768918 4755 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.768922 4755 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.768927 4755 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.768931 4755 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.768936 4755 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.768942 4755 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.768946 4755 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.768950 4755 feature_gate.go:330] unrecognized feature gate: PlatformOperators Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.768954 4755 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.768957 4755 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.768961 4755 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.768965 4755 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.768969 4755 feature_gate.go:330] unrecognized feature gate: GatewayAPI Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.768973 4755 feature_gate.go:330] unrecognized feature gate: Example Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.768978 4755 feature_gate.go:330] unrecognized feature gate: PinnedImages Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.768982 4755 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.768986 4755 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.768991 4755 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.768995 4755 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.768998 4755 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.769002 4755 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.769005 4755 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.769008 4755 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.769012 4755 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.769015 4755 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.769019 4755 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.769023 4755 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.769027 4755 feature_gate.go:330] unrecognized feature gate: InsightsConfig Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.769032 4755 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.769035 4755 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.769039 4755 feature_gate.go:330] unrecognized feature gate: 
HardwareSpeed Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.769042 4755 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.769047 4755 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.769052 4755 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.769056 4755 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.769059 4755 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.769063 4755 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.769066 4755 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.769071 4755 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.769075 4755 feature_gate.go:330] unrecognized feature gate: OVNObservability Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.769079 4755 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.769082 4755 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.769085 4755 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.769088 4755 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.769092 4755 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.769095 4755 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.769099 4755 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.769102 4755 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.769105 4755 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.769109 4755 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.769112 4755 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.769116 4755 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.769119 4755 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.769122 4755 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.769126 4755 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.769129 4755 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Nov 24 01:12:55 crc 
kubenswrapper[4755]: W1124 01:12:55.769132 4755 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.769136 4755 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.769139 4755 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.769142 4755 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.769148 4755 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.769152 4755 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.769156 4755 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.769159 4755 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769248 4755 flags.go:64] FLAG: --address="0.0.0.0" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769256 4755 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769266 4755 flags.go:64] FLAG: --anonymous-auth="true" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769294 4755 flags.go:64] FLAG: --application-metrics-count-limit="100" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769300 4755 flags.go:64] FLAG: --authentication-token-webhook="false" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769304 4755 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769312 4755 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769318 4755 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769323 4755 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769327 4755 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769333 4755 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769339 4755 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769343 4755 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769349 4755 flags.go:64] FLAG: --cgroup-root="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769353 4755 flags.go:64] FLAG: --cgroups-per-qos="true" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769358 4755 flags.go:64] FLAG: --client-ca-file="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769362 4755 flags.go:64] FLAG: --cloud-config="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769366 4755 flags.go:64] FLAG: --cloud-provider="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769371 4755 flags.go:64] FLAG: --cluster-dns="[]" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769706 4755 flags.go:64] FLAG: --cluster-domain="" Nov 24 01:12:55 crc 
kubenswrapper[4755]: I1124 01:12:55.769711 4755 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769717 4755 flags.go:64] FLAG: --config-dir="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769723 4755 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769730 4755 flags.go:64] FLAG: --container-log-max-files="5" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769737 4755 flags.go:64] FLAG: --container-log-max-size="10Mi" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769742 4755 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769748 4755 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769754 4755 flags.go:64] FLAG: --containerd-namespace="k8s.io" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769758 4755 flags.go:64] FLAG: --contention-profiling="false" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769762 4755 flags.go:64] FLAG: --cpu-cfs-quota="true" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769767 4755 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769772 4755 flags.go:64] FLAG: --cpu-manager-policy="none" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769776 4755 flags.go:64] FLAG: --cpu-manager-policy-options="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769781 4755 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769785 4755 flags.go:64] FLAG: --enable-controller-attach-detach="true" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769790 4755 flags.go:64] FLAG: --enable-debugging-handlers="true" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769795 4755 flags.go:64] FLAG: --enable-load-reader="false" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769800 4755 flags.go:64] FLAG: --enable-server="true" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769807 4755 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769814 4755 flags.go:64] FLAG: --event-burst="100" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769819 4755 flags.go:64] FLAG: --event-qps="50" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769824 4755 flags.go:64] FLAG: --event-storage-age-limit="default=0" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769829 4755 flags.go:64] FLAG: --event-storage-event-limit="default=0" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769834 4755 flags.go:64] FLAG: --eviction-hard="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769840 4755 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769845 4755 flags.go:64] FLAG: --eviction-minimum-reclaim="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769849 4755 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769854 4755 flags.go:64] FLAG: --eviction-soft="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769859 4755 flags.go:64] FLAG: --eviction-soft-grace-period="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769863 4755 flags.go:64] FLAG: 
--exit-on-lock-contention="false" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769869 4755 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769873 4755 flags.go:64] FLAG: --experimental-mounter-path="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769878 4755 flags.go:64] FLAG: --fail-cgroupv1="false" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769883 4755 flags.go:64] FLAG: --fail-swap-on="true" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769889 4755 flags.go:64] FLAG: --feature-gates="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769896 4755 flags.go:64] FLAG: --file-check-frequency="20s" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769901 4755 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769906 4755 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769911 4755 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769916 4755 flags.go:64] FLAG: --healthz-port="10248" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769921 4755 flags.go:64] FLAG: --help="false" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769925 4755 flags.go:64] FLAG: --hostname-override="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769930 4755 flags.go:64] FLAG: --housekeeping-interval="10s" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769935 4755 flags.go:64] FLAG: --http-check-frequency="20s" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769940 4755 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769945 4755 flags.go:64] FLAG: --image-credential-provider-config="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769949 4755 flags.go:64] FLAG: --image-gc-high-threshold="85" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769954 4755 flags.go:64] FLAG: --image-gc-low-threshold="80" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769959 4755 flags.go:64] FLAG: --image-service-endpoint="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769964 4755 flags.go:64] FLAG: --kernel-memcg-notification="false" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769969 4755 flags.go:64] FLAG: --kube-api-burst="100" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769974 4755 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769980 4755 flags.go:64] FLAG: --kube-api-qps="50" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769984 4755 flags.go:64] FLAG: --kube-reserved="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769988 4755 flags.go:64] FLAG: --kube-reserved-cgroup="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769992 4755 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.769997 4755 flags.go:64] FLAG: --kubelet-cgroups="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770001 4755 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770005 4755 flags.go:64] FLAG: --lock-file="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770009 4755 flags.go:64] FLAG: --log-cadvisor-usage="false" Nov 
24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770013 4755 flags.go:64] FLAG: --log-flush-frequency="5s" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770017 4755 flags.go:64] FLAG: --log-json-info-buffer-size="0" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770023 4755 flags.go:64] FLAG: --log-json-split-stream="false" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770028 4755 flags.go:64] FLAG: --log-text-info-buffer-size="0" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770032 4755 flags.go:64] FLAG: --log-text-split-stream="false" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770036 4755 flags.go:64] FLAG: --logging-format="text" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770040 4755 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770045 4755 flags.go:64] FLAG: --make-iptables-util-chains="true" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770049 4755 flags.go:64] FLAG: --manifest-url="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770053 4755 flags.go:64] FLAG: --manifest-url-header="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770059 4755 flags.go:64] FLAG: --max-housekeeping-interval="15s" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770063 4755 flags.go:64] FLAG: --max-open-files="1000000" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770069 4755 flags.go:64] FLAG: --max-pods="110" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770073 4755 flags.go:64] FLAG: --maximum-dead-containers="-1" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770078 4755 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770082 4755 flags.go:64] FLAG: --memory-manager-policy="None" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770087 4755 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770091 4755 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770095 4755 flags.go:64] FLAG: --node-ip="192.168.126.11" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770099 4755 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770109 4755 flags.go:64] FLAG: --node-status-max-images="50" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770113 4755 flags.go:64] FLAG: --node-status-update-frequency="10s" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770118 4755 flags.go:64] FLAG: --oom-score-adj="-999" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770122 4755 flags.go:64] FLAG: --pod-cidr="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770125 4755 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770132 4755 flags.go:64] FLAG: --pod-manifest-path="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770137 4755 flags.go:64] FLAG: --pod-max-pids="-1" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770141 4755 flags.go:64] FLAG: --pods-per-core="0" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770146 4755 flags.go:64] 
FLAG: --port="10250" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770150 4755 flags.go:64] FLAG: --protect-kernel-defaults="false" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770154 4755 flags.go:64] FLAG: --provider-id="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770158 4755 flags.go:64] FLAG: --qos-reserved="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770162 4755 flags.go:64] FLAG: --read-only-port="10255" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770166 4755 flags.go:64] FLAG: --register-node="true" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770170 4755 flags.go:64] FLAG: --register-schedulable="true" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770174 4755 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770182 4755 flags.go:64] FLAG: --registry-burst="10" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770186 4755 flags.go:64] FLAG: --registry-qps="5" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770189 4755 flags.go:64] FLAG: --reserved-cpus="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770193 4755 flags.go:64] FLAG: --reserved-memory="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770199 4755 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770204 4755 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770208 4755 flags.go:64] FLAG: --rotate-certificates="false" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770216 4755 flags.go:64] FLAG: --rotate-server-certificates="false" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770220 4755 flags.go:64] FLAG: --runonce="false" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770224 4755 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770229 4755 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770233 4755 flags.go:64] FLAG: --seccomp-default="false" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770237 4755 flags.go:64] FLAG: --serialize-image-pulls="true" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770242 4755 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770247 4755 flags.go:64] FLAG: --storage-driver-db="cadvisor" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770251 4755 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770255 4755 flags.go:64] FLAG: --storage-driver-password="root" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770259 4755 flags.go:64] FLAG: --storage-driver-secure="false" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770263 4755 flags.go:64] FLAG: --storage-driver-table="stats" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770267 4755 flags.go:64] FLAG: --storage-driver-user="root" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770270 4755 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770275 4755 flags.go:64] FLAG: --sync-frequency="1m0s" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770279 4755 flags.go:64] FLAG: --system-cgroups="" Nov 24 01:12:55 
crc kubenswrapper[4755]: I1124 01:12:55.770284 4755 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770290 4755 flags.go:64] FLAG: --system-reserved-cgroup="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770294 4755 flags.go:64] FLAG: --tls-cert-file="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770298 4755 flags.go:64] FLAG: --tls-cipher-suites="[]" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770303 4755 flags.go:64] FLAG: --tls-min-version="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770308 4755 flags.go:64] FLAG: --tls-private-key-file="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770313 4755 flags.go:64] FLAG: --topology-manager-policy="none" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770317 4755 flags.go:64] FLAG: --topology-manager-policy-options="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770321 4755 flags.go:64] FLAG: --topology-manager-scope="container" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770325 4755 flags.go:64] FLAG: --v="2" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770330 4755 flags.go:64] FLAG: --version="false" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770336 4755 flags.go:64] FLAG: --vmodule="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770344 4755 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.770348 4755 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.774752 4755 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.774807 4755 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.774827 4755 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
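The block above is the kubelet's startup flag dump: one 'flags.go:64] FLAG: --name="value"' entry per command-line flag, logged before the settings from /etc/kubernetes/kubelet.conf are merged in. A minimal Python sketch for pulling those pairs out of a journal excerpt so they can be compared against the --config file; the regex and the sample line are assumptions based only on the format visible in this log, not an official kubelet interface.

# Illustrative only: collect the "FLAG: --name=value" lines that kubenswrapper
# logged above into a dict. The pattern mirrors the flags.go:64 lines shown in
# this journal excerpt and is not a stable kubelet format guarantee.
import re

FLAG_RE = re.compile(r'flags\.go:\d+\] FLAG: (--[\w-]+)="(.*?)"')

def parse_kubelet_flags(journal_text: str) -> dict[str, str]:
    """Return {flag: value} for every FLAG: line in a kubelet journal dump."""
    return {m.group(1): m.group(2) for m in FLAG_RE.finditer(journal_text)}

if __name__ == "__main__":
    # Hypothetical sample line copied from the dump above.
    sample = 'I1124 01:12:55.769343 4755 flags.go:64] FLAG: --cgroup-driver="cgroupfs"'
    print(parse_kubelet_flags(sample))   # {'--cgroup-driver': 'cgroupfs'}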
Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.774845 4755 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.774859 4755 feature_gate.go:330] unrecognized feature gate: Example Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.774871 4755 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.774883 4755 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.774895 4755 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.774908 4755 feature_gate.go:330] unrecognized feature gate: PlatformOperators Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.774919 4755 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.774934 4755 feature_gate.go:330] unrecognized feature gate: NewOLM Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.774948 4755 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.774959 4755 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.774969 4755 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.774982 4755 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.774993 4755 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775004 4755 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775016 4755 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775027 4755 feature_gate.go:330] unrecognized feature gate: GatewayAPI Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775037 4755 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775048 4755 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775060 4755 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775070 4755 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775080 4755 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775090 4755 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775100 4755 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775110 4755 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775121 4755 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775133 4755 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Nov 24 01:12:55 
crc kubenswrapper[4755]: W1124 01:12:55.775143 4755 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775154 4755 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775165 4755 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775176 4755 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775192 4755 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775206 4755 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775220 4755 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775233 4755 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775246 4755 feature_gate.go:330] unrecognized feature gate: OVNObservability Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775258 4755 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775271 4755 feature_gate.go:330] unrecognized feature gate: SignatureStores Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775282 4755 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775293 4755 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775305 4755 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775315 4755 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775325 4755 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775335 4755 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775346 4755 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775357 4755 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775368 4755 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775378 4755 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775388 4755 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775399 4755 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775409 4755 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775419 4755 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 
01:12:55.775429 4755 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775440 4755 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775451 4755 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775462 4755 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775473 4755 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775484 4755 feature_gate.go:330] unrecognized feature gate: PinnedImages Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775494 4755 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775505 4755 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775516 4755 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775530 4755 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775543 4755 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775554 4755 feature_gate.go:330] unrecognized feature gate: InsightsConfig Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775569 4755 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775583 4755 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775594 4755 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775644 4755 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.775656 4755 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.775676 4755 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.787300 4755 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.787345 4755 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787414 4755 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787423 4755 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787427 4755 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787432 4755 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787437 4755 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787442 4755 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787449 4755 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787454 4755 feature_gate.go:330] unrecognized feature gate: OVNObservability Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787458 4755 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787462 4755 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787466 4755 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787470 4755 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787474 4755 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787477 4755 feature_gate.go:330] unrecognized feature gate: PlatformOperators Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787481 4755 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787485 4755 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787489 4755 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787493 4755 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787497 4755 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787502 4755 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787506 4755 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787510 4755 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787515 4755 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787519 4755 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787525 4755 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787530 4755 feature_gate.go:330] unrecognized feature gate: NewOLM Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787535 4755 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787539 4755 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787543 4755 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787547 4755 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787551 4755 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787556 4755 feature_gate.go:330] unrecognized feature gate: Example Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787560 4755 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787564 4755 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787575 4755 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787580 4755 feature_gate.go:330] unrecognized feature gate: InsightsConfig Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787584 4755 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787588 4755 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787592 4755 feature_gate.go:330] unrecognized feature gate: SignatureStores Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787595 4755 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787610 4755 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787615 4755 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787620 4755 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787624 4755 feature_gate.go:330] unrecognized feature gate: PinnedImages Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787627 4755 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787631 4755 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787635 4755 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787638 4755 feature_gate.go:330] unrecognized feature gate: GatewayAPI Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787644 4755 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787648 4755 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787651 4755 feature_gate.go:330] unrecognized feature gate: 
IngressControllerLBSubnetsAWS Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787655 4755 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787659 4755 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787663 4755 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787666 4755 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787670 4755 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787674 4755 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787677 4755 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787681 4755 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787710 4755 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787713 4755 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787717 4755 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787721 4755 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787724 4755 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787728 4755 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787731 4755 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787735 4755 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787740 4755 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787744 4755 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787749 4755 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787757 4755 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.787765 4755 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787881 4755 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787887 4755 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787891 4755 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787895 4755 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787899 4755 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787903 4755 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787908 4755 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
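Each time the kubelet finishes a pass over its gate configuration it prints the resolved set as a Go map, e.g. 'feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true ... VolumeAttributesClass:false]}', and the same map appears several times above. A minimal sketch, assuming only that Go map text formatting, for turning such a line into a Python dict of booleans.

# Illustrative only: parse the "feature gates: {map[Name:bool ...]}" summary
# that kubenswrapper prints (feature_gate.go:386) into a Python dict, e.g. to
# see which gates ended up enabled. The parsing is an assumption based on the
# Go map formatting visible in this log, not a stable kubelet interface.
import re

def parse_feature_gates(line: str) -> dict[str, bool]:
    """Return {gate_name: enabled} from one feature-gate summary log line."""
    body = re.search(r'feature gates: \{map\[(.*?)\]\}', line)
    if not body:
        return {}
    pairs = (item.split(":", 1) for item in body.group(1).split())
    return {name: value == "true" for name, value in pairs}

# Hypothetical shortened sample in the same format as the lines above.
sample = "feature gates: {map[CloudDualStackNodeIPs:true KMSv1:true NodeSwap:false]}"
print(parse_feature_gates(sample))  # {'CloudDualStackNodeIPs': True, 'KMSv1': True, 'NodeSwap': False}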
Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787913 4755 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787917 4755 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787921 4755 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787925 4755 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787929 4755 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787932 4755 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787935 4755 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787939 4755 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787943 4755 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787946 4755 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787950 4755 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787954 4755 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787957 4755 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787962 4755 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787966 4755 feature_gate.go:330] unrecognized feature gate: GatewayAPI Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787970 4755 feature_gate.go:330] unrecognized feature gate: InsightsConfig Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787974 4755 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787978 4755 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787981 4755 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787986 4755 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787990 4755 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787994 4755 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.787998 4755 feature_gate.go:330] unrecognized feature gate: SignatureStores Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.788002 4755 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.788005 4755 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.788009 4755 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.788012 4755 feature_gate.go:330] unrecognized feature gate: NewOLM Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.788016 4755 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.788019 4755 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.788023 4755 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.788026 4755 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.788030 4755 feature_gate.go:330] unrecognized feature gate: PlatformOperators Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.788033 4755 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.788037 4755 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.788041 4755 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.788045 4755 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.788049 4755 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.788052 4755 feature_gate.go:330] unrecognized feature gate: PinnedImages Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.788056 4755 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.788060 4755 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.788063 4755 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.788066 4755 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.788070 4755 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.788074 4755 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.788077 4755 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.788081 4755 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.788084 4755 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Nov 24 
01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.788088 4755 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.788091 4755 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.788095 4755 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.788098 4755 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.788102 4755 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.788106 4755 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.788109 4755 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.788113 4755 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.788117 4755 feature_gate.go:330] unrecognized feature gate: Example Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.788120 4755 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.788123 4755 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.788127 4755 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.788131 4755 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.788136 4755 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.788140 4755 feature_gate.go:330] unrecognized feature gate: OVNObservability Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.788145 4755 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.788149 4755 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.788155 4755 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.788346 4755 server.go:940] "Client rotation is on, will bootstrap in background" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.793859 4755 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.794280 4755 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". 
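The long runs of 'feature_gate.go:330] unrecognized feature gate: <Name>' warnings repeat because the gate configuration is re-parsed several times during startup, so each unknown name is reported once per pass. A small sketch that reduces a journal excerpt to the distinct names and how often each was reported; the regex is an assumption based on the warning format shown here.

# Illustrative only: count how often each unknown gate name was warned about
# in a kubelet journal dump, using the feature_gate.go:330 warning text above.
import re
from collections import Counter

WARN_RE = re.compile(r"unrecognized feature gate: (\S+)")

def unrecognized_gates(journal_text: str) -> Counter:
    """Return Counter({gate_name: warning_count}) for a journal excerpt."""
    return Counter(WARN_RE.findall(journal_text))

# Hypothetical two-line sample in the same format as the warnings above.
sample = ("W1124 01:12:55.775484 4755 feature_gate.go:330] unrecognized feature gate: PinnedImages\n"
          "W1124 01:12:55.788052 4755 feature_gate.go:330] unrecognized feature gate: PinnedImages\n")
print(unrecognized_gates(sample))  # Counter({'PinnedImages': 2})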
Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.796744 4755 server.go:997] "Starting client certificate rotation" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.796794 4755 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.797082 4755 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-27 08:21:57.192714452 +0000 UTC Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.797212 4755 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 799h9m1.395508078s for next certificate rotation Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.823132 4755 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.825792 4755 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.841782 4755 log.go:25] "Validated CRI v1 runtime API" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.877447 4755 log.go:25] "Validated CRI v1 image API" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.882388 4755 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.889240 4755 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-11-24-01-08-28-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.889283 4755 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}] Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.911882 4755 manager.go:217] Machine: {Timestamp:2025-11-24 01:12:55.908387747 +0000 UTC m=+0.594453288 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654124544 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:cb6dbfa2-ee9a-4406-af65-1558b4c6cb25 BootID:50bb41a3-bb20-461c-ba4c-72998ece87bc Filesystems:[{Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 
Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:ac:1c:92 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:ac:1c:92 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:91:08:ee Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:18:cb:55 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:73:c7:0c Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:d7:03:b0 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:76:48:2b:ab:db:25 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:76:9b:1b:1a:dc:2d Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654124544 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: 
DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.912234 4755 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.912474 4755 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.913007 4755 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.913189 4755 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.913247 4755 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.913502 4755 topology_manager.go:138] "Creating topology manager with none policy" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.913514 4755 container_manager_linux.go:303] "Creating device plugin manager" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.914127 4755 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.914166 4755 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Nov 24 
01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.914867 4755 state_mem.go:36] "Initialized new in-memory state store" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.914964 4755 server.go:1245] "Using root directory" path="/var/lib/kubelet" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.918461 4755 kubelet.go:418] "Attempting to sync node with API server" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.918486 4755 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.918530 4755 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.918546 4755 kubelet.go:324] "Adding apiserver pod source" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.918560 4755 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.922917 4755 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.924771 4755 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.926258 4755 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.103:6443: connect: connection refused Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.926350 4755 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.103:6443: connect: connection refused Nov 24 01:12:55 crc kubenswrapper[4755]: E1124 01:12:55.926486 4755 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.103:6443: connect: connection refused" logger="UnhandledError" Nov 24 01:12:55 crc kubenswrapper[4755]: E1124 01:12:55.926403 4755 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.103:6443: connect: connection refused" logger="UnhandledError" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.927048 4755 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.928321 4755 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.928345 4755 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.928363 4755 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.928371 4755 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Nov 24 01:12:55 crc kubenswrapper[4755]: 
I1124 01:12:55.928383 4755 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.928393 4755 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.928401 4755 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.928414 4755 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.928424 4755 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.928431 4755 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.928442 4755 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.928450 4755 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.929035 4755 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.929563 4755 server.go:1280] "Started kubelet" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.930659 4755 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Nov 24 01:12:55 crc systemd[1]: Started Kubernetes Kubelet. Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.930719 4755 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.933664 4755 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.103:6443: connect: connection refused Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.934319 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.934380 4755 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Nov 24 01:12:55 crc kubenswrapper[4755]: E1124 01:12:55.934999 4755 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.935222 4755 volume_manager.go:287] "The desired_state_of_world populator starts" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.935361 4755 volume_manager.go:289] "Starting Kubelet Volume Manager" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.935463 4755 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.935454 4755 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.935019 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-15 12:39:30.213727197 +0000 UTC Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.935734 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 1259h26m34.278006916s for next certificate rotation Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.935776 4755 
server.go:460] "Adding debug handlers to kubelet server" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.945333 4755 factory.go:55] Registering systemd factory Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.945341 4755 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.103:6443: connect: connection refused Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.945501 4755 factory.go:221] Registration of the systemd container factory successfully Nov 24 01:12:55 crc kubenswrapper[4755]: E1124 01:12:55.945578 4755 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.103:6443: connect: connection refused" logger="UnhandledError" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.946633 4755 factory.go:153] Registering CRI-O factory Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.946697 4755 factory.go:221] Registration of the crio container factory successfully Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.946809 4755 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.946846 4755 factory.go:103] Registering Raw factory Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.947032 4755 manager.go:1196] Started watching for new ooms in manager Nov 24 01:12:55 crc kubenswrapper[4755]: E1124 01:12:55.945467 4755 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.103:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187acc3f35f9ba44 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-11-24 01:12:55.929535044 +0000 UTC m=+0.615600545,LastTimestamp:2025-11-24 01:12:55.929535044 +0000 UTC m=+0.615600545,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Nov 24 01:12:55 crc kubenswrapper[4755]: E1124 01:12:55.951337 4755 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.103:6443: connect: connection refused" interval="200ms" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.953394 4755 manager.go:319] Starting recovery of all containers Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.960047 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.960143 4755 reconstruct.go:130] "Volume is marked as 
uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.960165 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.960186 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.960205 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.960224 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.960244 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.960263 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.960285 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.960303 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.960323 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.960341 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.960386 4755 reconstruct.go:130] "Volume is marked as uncertain and added into 
the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.960406 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.960424 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.960442 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.960459 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.960516 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.960536 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.960578 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.960644 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.960670 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.960704 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.960765 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.960791 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.960837 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.960882 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.960911 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.960955 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.960984 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.961002 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.961023 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.961041 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.961059 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.961078 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.961096 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.961131 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.961149 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.961168 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.961184 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.961202 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.961219 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.961237 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.961254 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.961297 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.961317 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.961353 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.961371 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.961389 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.961408 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.961427 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.961447 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.961493 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.961520 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.961540 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.961561 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.961580 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" 
volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.961635 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.961656 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.961680 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.961742 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.961765 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.961783 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.961800 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.961825 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.961850 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.961889 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.961907 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" 
volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.961939 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.961956 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.961982 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.962008 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.962025 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.962066 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.962085 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.962101 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.962146 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.962166 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.962225 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" 
volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.962285 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.962303 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.962321 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.962345 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.962362 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.962419 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.962437 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.962461 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.962486 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.962503 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.962522 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" 
volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.962540 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.962566 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.962660 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.962677 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.962697 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.963754 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.963891 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.963923 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.963942 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.963961 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.964009 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" 
volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.964031 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.964052 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.964080 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.964229 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.964269 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.964296 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.964320 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.964408 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.964448 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.964471 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.964498 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.964525 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.964548 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.964573 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.964633 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.964676 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.964700 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.964720 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.964770 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.964791 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.964813 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.964833 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" 
volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.964853 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.964893 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.964914 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.964940 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.964961 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.964979 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.965000 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.965017 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.965056 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.965092 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.965111 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" 
volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.965159 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.965180 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.965200 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.965219 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.965239 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.965261 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.965300 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.965320 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.965345 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.965395 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.965418 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" 
volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.969350 4755 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.969436 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.969468 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.969511 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.969531 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.969572 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.969589 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.969640 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.969659 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.969678 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.969696 4755 reconstruct.go:130] "Volume is marked as 
uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.969716 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.969735 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.969766 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.969793 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.969826 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.969854 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.969894 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.969911 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.969929 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.969947 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.969964 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.969979 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.969999 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.970013 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.970028 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.970050 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.970072 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.970085 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.970100 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.970114 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.970130 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.970150 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.970170 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.970196 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.970211 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.970231 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.970251 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.970265 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.970279 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.970301 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.970320 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.970340 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.970356 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" 
volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.970371 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.970384 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.970406 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.970429 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.970444 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.970459 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.970473 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.970488 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.970503 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.970517 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.970538 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" 
volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.970557 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.970575 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.970594 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.970638 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.970662 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.970683 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.970702 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.970717 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.970736 4755 reconstruct.go:97] "Volume reconstruction finished" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.970746 4755 reconciler.go:26] "Reconciler: start to sync state" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.982793 4755 manager.go:324] Recovery completed Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.993166 4755 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.995249 4755 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.995309 4755 status_manager.go:217] "Starting to sync pod status with apiserver" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.995352 4755 kubelet.go:2335] "Starting kubelet main sync loop" Nov 24 01:12:55 crc kubenswrapper[4755]: E1124 01:12:55.995435 4755 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Nov 24 01:12:55 crc kubenswrapper[4755]: W1124 01:12:55.998490 4755 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.103:6443: connect: connection refused Nov 24 01:12:55 crc kubenswrapper[4755]: E1124 01:12:55.998555 4755 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.103:6443: connect: connection refused" logger="UnhandledError" Nov 24 01:12:55 crc kubenswrapper[4755]: I1124 01:12:55.998642 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.001524 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.001571 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.001586 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.003111 4755 cpu_manager.go:225] "Starting CPU manager" policy="none" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.003131 4755 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.003150 4755 state_mem.go:36] "Initialized new in-memory state store" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.016679 4755 policy_none.go:49] "None policy: Start" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.018158 4755 memory_manager.go:170] "Starting memorymanager" policy="None" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.018181 4755 state_mem.go:35] "Initializing new in-memory state store" Nov 24 01:12:56 crc kubenswrapper[4755]: E1124 01:12:56.036179 4755 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.074489 4755 manager.go:334] "Starting Device Plugin manager" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.074531 4755 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.074542 4755 server.go:79] "Starting device plugin registration server" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.074905 4755 eviction_manager.go:189] "Eviction manager: starting control loop" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.074922 4755 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 
monitorPeriod="10s" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.075089 4755 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.077700 4755 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.077715 4755 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Nov 24 01:12:56 crc kubenswrapper[4755]: E1124 01:12:56.083854 4755 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.096059 4755 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.096135 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.097002 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.097033 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.097042 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.097151 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.097359 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.097408 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.097678 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.097719 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.097728 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.097845 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.098042 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.098097 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.098688 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.098715 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.098727 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.098752 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.098768 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.098796 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.098823 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.098854 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.098898 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.098917 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.098938 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.098984 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.099313 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.099340 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.099349 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.099421 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.099632 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.099700 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.100125 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.100147 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.100127 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.100167 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.100176 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.100154 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.100315 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.100335 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.100971 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.101003 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.101014 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.101136 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.101162 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.101172 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:12:56 crc kubenswrapper[4755]: E1124 01:12:56.152543 4755 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.103:6443: connect: connection refused" interval="400ms" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.173482 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.173540 4755 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.173582 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.173653 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.173693 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.173729 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.173772 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.173816 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.173881 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.173926 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.173998 4755 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.174045 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.174069 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.174092 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.174112 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.175669 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.177050 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.177092 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.177106 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.177136 4755 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 24 01:12:56 crc kubenswrapper[4755]: E1124 01:12:56.177525 4755 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.103:6443: connect: connection refused" node="crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.275868 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.275934 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.275970 4755 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.276002 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.276033 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.276062 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.276092 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.276119 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.276143 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.276177 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.276183 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.276231 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 01:12:56 crc 
kubenswrapper[4755]: I1124 01:12:56.276222 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.276289 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.276307 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.276263 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.276166 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.276330 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.276263 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.276361 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.276119 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.276375 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod 
\"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.276386 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.276325 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.276295 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.276416 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.276462 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.276491 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.276526 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.276653 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.377679 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.379698 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.379736 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 
01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.379746 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.379769 4755 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 24 01:12:56 crc kubenswrapper[4755]: E1124 01:12:56.380200 4755 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.103:6443: connect: connection refused" node="crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.433877 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.441001 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.459301 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.481202 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: W1124 01:12:56.483326 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-bb84e88ed5c0dde94e83ae98ec8671befe074b3f774a2942846367ed35f3bb6e WatchSource:0}: Error finding container bb84e88ed5c0dde94e83ae98ec8671befe074b3f774a2942846367ed35f3bb6e: Status 404 returned error can't find the container with id bb84e88ed5c0dde94e83ae98ec8671befe074b3f774a2942846367ed35f3bb6e Nov 24 01:12:56 crc kubenswrapper[4755]: W1124 01:12:56.488326 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-f2645bff3aaa790f12db8aeefd86944c6eebdbdd805772d78703e8788101a4f1 WatchSource:0}: Error finding container f2645bff3aaa790f12db8aeefd86944c6eebdbdd805772d78703e8788101a4f1: Status 404 returned error can't find the container with id f2645bff3aaa790f12db8aeefd86944c6eebdbdd805772d78703e8788101a4f1 Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.489906 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 01:12:56 crc kubenswrapper[4755]: W1124 01:12:56.499193 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-2c2f0080d0b2968919f2aca18b462cfe6b2111e2b4e39f1180fff76f903eedcb WatchSource:0}: Error finding container 2c2f0080d0b2968919f2aca18b462cfe6b2111e2b4e39f1180fff76f903eedcb: Status 404 returned error can't find the container with id 2c2f0080d0b2968919f2aca18b462cfe6b2111e2b4e39f1180fff76f903eedcb Nov 24 01:12:56 crc kubenswrapper[4755]: W1124 01:12:56.501519 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-25dd204893c06ea8bb5e6ea174dff04f84c04d9b966ff81170a6afa0e91018e4 WatchSource:0}: Error finding container 25dd204893c06ea8bb5e6ea174dff04f84c04d9b966ff81170a6afa0e91018e4: Status 404 returned error can't find the container with id 25dd204893c06ea8bb5e6ea174dff04f84c04d9b966ff81170a6afa0e91018e4 Nov 24 01:12:56 crc kubenswrapper[4755]: W1124 01:12:56.514760 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-60327b85af5f435d39e9231aafb8d9e6291e5061e214977d751ea36373f15621 WatchSource:0}: Error finding container 60327b85af5f435d39e9231aafb8d9e6291e5061e214977d751ea36373f15621: Status 404 returned error can't find the container with id 60327b85af5f435d39e9231aafb8d9e6291e5061e214977d751ea36373f15621 Nov 24 01:12:56 crc kubenswrapper[4755]: E1124 01:12:56.553284 4755 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.103:6443: connect: connection refused" interval="800ms" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.780511 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.782393 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.782441 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.782453 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.782488 4755 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 24 01:12:56 crc kubenswrapper[4755]: E1124 01:12:56.783136 4755 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.103:6443: connect: connection refused" node="crc" Nov 24 01:12:56 crc kubenswrapper[4755]: I1124 01:12:56.935741 4755 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.103:6443: connect: connection refused Nov 24 01:12:56 crc kubenswrapper[4755]: W1124 01:12:56.941846 4755 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed 
to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.103:6443: connect: connection refused Nov 24 01:12:56 crc kubenswrapper[4755]: E1124 01:12:56.941949 4755 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.103:6443: connect: connection refused" logger="UnhandledError" Nov 24 01:12:57 crc kubenswrapper[4755]: I1124 01:12:57.001635 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"2c2f0080d0b2968919f2aca18b462cfe6b2111e2b4e39f1180fff76f903eedcb"} Nov 24 01:12:57 crc kubenswrapper[4755]: I1124 01:12:57.004236 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"25dd204893c06ea8bb5e6ea174dff04f84c04d9b966ff81170a6afa0e91018e4"} Nov 24 01:12:57 crc kubenswrapper[4755]: I1124 01:12:57.006954 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"bb84e88ed5c0dde94e83ae98ec8671befe074b3f774a2942846367ed35f3bb6e"} Nov 24 01:12:57 crc kubenswrapper[4755]: I1124 01:12:57.008251 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"f2645bff3aaa790f12db8aeefd86944c6eebdbdd805772d78703e8788101a4f1"} Nov 24 01:12:57 crc kubenswrapper[4755]: I1124 01:12:57.009703 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"60327b85af5f435d39e9231aafb8d9e6291e5061e214977d751ea36373f15621"} Nov 24 01:12:57 crc kubenswrapper[4755]: W1124 01:12:57.245343 4755 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.103:6443: connect: connection refused Nov 24 01:12:57 crc kubenswrapper[4755]: E1124 01:12:57.245440 4755 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.103:6443: connect: connection refused" logger="UnhandledError" Nov 24 01:12:57 crc kubenswrapper[4755]: W1124 01:12:57.248213 4755 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.103:6443: connect: connection refused Nov 24 01:12:57 crc kubenswrapper[4755]: E1124 01:12:57.248277 4755 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get 
\"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.103:6443: connect: connection refused" logger="UnhandledError" Nov 24 01:12:57 crc kubenswrapper[4755]: E1124 01:12:57.354952 4755 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.103:6443: connect: connection refused" interval="1.6s" Nov 24 01:12:57 crc kubenswrapper[4755]: W1124 01:12:57.527663 4755 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.103:6443: connect: connection refused Nov 24 01:12:57 crc kubenswrapper[4755]: E1124 01:12:57.527739 4755 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.103:6443: connect: connection refused" logger="UnhandledError" Nov 24 01:12:57 crc kubenswrapper[4755]: I1124 01:12:57.583754 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:12:57 crc kubenswrapper[4755]: I1124 01:12:57.585098 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:12:57 crc kubenswrapper[4755]: I1124 01:12:57.585134 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:12:57 crc kubenswrapper[4755]: I1124 01:12:57.585147 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:12:57 crc kubenswrapper[4755]: I1124 01:12:57.585174 4755 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 24 01:12:57 crc kubenswrapper[4755]: E1124 01:12:57.585713 4755 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.103:6443: connect: connection refused" node="crc" Nov 24 01:12:57 crc kubenswrapper[4755]: I1124 01:12:57.935551 4755 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.103:6443: connect: connection refused Nov 24 01:12:58 crc kubenswrapper[4755]: I1124 01:12:58.013425 4755 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="2cfc5ee83a76ce24ca8eab469ec28b319d9e69d9c7d8618f97a2b2a813022a97" exitCode=0 Nov 24 01:12:58 crc kubenswrapper[4755]: I1124 01:12:58.013518 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"2cfc5ee83a76ce24ca8eab469ec28b319d9e69d9c7d8618f97a2b2a813022a97"} Nov 24 01:12:58 crc kubenswrapper[4755]: I1124 01:12:58.013549 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:12:58 crc kubenswrapper[4755]: I1124 01:12:58.014543 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 
01:12:58 crc kubenswrapper[4755]: I1124 01:12:58.014577 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:12:58 crc kubenswrapper[4755]: I1124 01:12:58.014593 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:12:58 crc kubenswrapper[4755]: I1124 01:12:58.014846 4755 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd" exitCode=0 Nov 24 01:12:58 crc kubenswrapper[4755]: I1124 01:12:58.014909 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd"} Nov 24 01:12:58 crc kubenswrapper[4755]: I1124 01:12:58.015001 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:12:58 crc kubenswrapper[4755]: I1124 01:12:58.015887 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:12:58 crc kubenswrapper[4755]: I1124 01:12:58.015908 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:12:58 crc kubenswrapper[4755]: I1124 01:12:58.015916 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:12:58 crc kubenswrapper[4755]: I1124 01:12:58.016249 4755 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47" exitCode=0 Nov 24 01:12:58 crc kubenswrapper[4755]: I1124 01:12:58.016292 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47"} Nov 24 01:12:58 crc kubenswrapper[4755]: I1124 01:12:58.016312 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:12:58 crc kubenswrapper[4755]: I1124 01:12:58.016882 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:12:58 crc kubenswrapper[4755]: I1124 01:12:58.016914 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:12:58 crc kubenswrapper[4755]: I1124 01:12:58.016932 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:12:58 crc kubenswrapper[4755]: I1124 01:12:58.016940 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:12:58 crc kubenswrapper[4755]: I1124 01:12:58.017766 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:12:58 crc kubenswrapper[4755]: I1124 01:12:58.017824 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:12:58 crc kubenswrapper[4755]: I1124 01:12:58.017848 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:12:58 crc kubenswrapper[4755]: I1124 01:12:58.018431 
4755 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="2ccf87d0c9abfae5e7978f134c48167ee4e43a523b6d9ae7b9967259dbd1b968" exitCode=0 Nov 24 01:12:58 crc kubenswrapper[4755]: I1124 01:12:58.018460 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"2ccf87d0c9abfae5e7978f134c48167ee4e43a523b6d9ae7b9967259dbd1b968"} Nov 24 01:12:58 crc kubenswrapper[4755]: I1124 01:12:58.018495 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:12:58 crc kubenswrapper[4755]: I1124 01:12:58.019277 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:12:58 crc kubenswrapper[4755]: I1124 01:12:58.019303 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:12:58 crc kubenswrapper[4755]: I1124 01:12:58.019317 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:12:58 crc kubenswrapper[4755]: I1124 01:12:58.021378 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"2a9cb7e9aa093857c03bb4267ff3386e04e4de6e4025fdbfcb628db800d62745"} Nov 24 01:12:58 crc kubenswrapper[4755]: I1124 01:12:58.021402 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"5845caccbc89d64a7aaabedeec11a5d75fb7f25eb806cf8dceaf15263f30b4f1"} Nov 24 01:12:58 crc kubenswrapper[4755]: I1124 01:12:58.021413 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"99c1c133fe773aac40181aea15bd49ad2aee2f08caed8f7436c81ae3fc310300"} Nov 24 01:12:58 crc kubenswrapper[4755]: I1124 01:12:58.021421 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"b22922ca0ffb3c9c410a6ed82822ce5b3436d0e44993abe1fcb678959f243808"} Nov 24 01:12:58 crc kubenswrapper[4755]: I1124 01:12:58.021454 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:12:58 crc kubenswrapper[4755]: I1124 01:12:58.022256 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:12:58 crc kubenswrapper[4755]: I1124 01:12:58.022278 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:12:58 crc kubenswrapper[4755]: I1124 01:12:58.022285 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:12:58 crc kubenswrapper[4755]: W1124 01:12:58.845817 4755 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.103:6443: connect: connection refused 
Nov 24 01:12:58 crc kubenswrapper[4755]: E1124 01:12:58.846179 4755 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.103:6443: connect: connection refused" logger="UnhandledError" Nov 24 01:12:58 crc kubenswrapper[4755]: I1124 01:12:58.934776 4755 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.103:6443: connect: connection refused Nov 24 01:12:58 crc kubenswrapper[4755]: E1124 01:12:58.956706 4755 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.103:6443: connect: connection refused" interval="3.2s" Nov 24 01:12:59 crc kubenswrapper[4755]: I1124 01:12:59.027578 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"beedc1a75d10e8bbe8f51adfca845b931ae288d59d89220ecc08bb65c425dcd7"} Nov 24 01:12:59 crc kubenswrapper[4755]: I1124 01:12:59.027655 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"277775a9aa022595c0bd26119595cdf69b370e4851a05cab33b8ef4779923a78"} Nov 24 01:12:59 crc kubenswrapper[4755]: I1124 01:12:59.027670 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"288a11a39d941a7886acc9d7ee23935ca723a109dcb2b8f0743bb56e7d54a7e7"} Nov 24 01:12:59 crc kubenswrapper[4755]: I1124 01:12:59.027760 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:12:59 crc kubenswrapper[4755]: I1124 01:12:59.029028 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:12:59 crc kubenswrapper[4755]: I1124 01:12:59.029063 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:12:59 crc kubenswrapper[4755]: I1124 01:12:59.029076 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:12:59 crc kubenswrapper[4755]: I1124 01:12:59.032226 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"e4053bdcbd61691699f85f36a3a24d3dffc18bc70e0fcfb2e0a43d9574469b0f"} Nov 24 01:12:59 crc kubenswrapper[4755]: I1124 01:12:59.032273 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:12:59 crc kubenswrapper[4755]: I1124 01:12:59.032285 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"6fcfcdeaa4c2ae4a6f4bf2d79a64607361b9798549e8910a7374e4e3d17a16ae"} Nov 24 01:12:59 crc kubenswrapper[4755]: I1124 
01:12:59.032304 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"89d05f3773b4861d3bb33d7e6a6f09baaedb7510ed0b15703495110923a2f790"} Nov 24 01:12:59 crc kubenswrapper[4755]: I1124 01:12:59.032320 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"78a49336f91d7c8aeb0d2861d1228d7b8eb478290bdef3b0662e26f9e1cddd57"} Nov 24 01:12:59 crc kubenswrapper[4755]: I1124 01:12:59.032336 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"40df6848b15d905bccead2547cc1dec836a69e38e80cb65de5dbee812f0081cb"} Nov 24 01:12:59 crc kubenswrapper[4755]: I1124 01:12:59.033143 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:12:59 crc kubenswrapper[4755]: I1124 01:12:59.033175 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:12:59 crc kubenswrapper[4755]: I1124 01:12:59.033187 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:12:59 crc kubenswrapper[4755]: I1124 01:12:59.034129 4755 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a" exitCode=0 Nov 24 01:12:59 crc kubenswrapper[4755]: I1124 01:12:59.034196 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a"} Nov 24 01:12:59 crc kubenswrapper[4755]: I1124 01:12:59.034253 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:12:59 crc kubenswrapper[4755]: I1124 01:12:59.035282 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:12:59 crc kubenswrapper[4755]: I1124 01:12:59.035307 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:12:59 crc kubenswrapper[4755]: I1124 01:12:59.035318 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:12:59 crc kubenswrapper[4755]: I1124 01:12:59.038586 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"05b738b3ec74c0cc3ba0b2e527320b6c06aa4af4b91a2135f26b299a9dc7ea19"} Nov 24 01:12:59 crc kubenswrapper[4755]: I1124 01:12:59.038668 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:12:59 crc kubenswrapper[4755]: I1124 01:12:59.038698 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:12:59 crc kubenswrapper[4755]: I1124 01:12:59.039480 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:12:59 crc kubenswrapper[4755]: I1124 01:12:59.039504 4755 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:12:59 crc kubenswrapper[4755]: I1124 01:12:59.039515 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:12:59 crc kubenswrapper[4755]: I1124 01:12:59.040253 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:12:59 crc kubenswrapper[4755]: I1124 01:12:59.040280 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:12:59 crc kubenswrapper[4755]: I1124 01:12:59.040288 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:12:59 crc kubenswrapper[4755]: I1124 01:12:59.186515 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:12:59 crc kubenswrapper[4755]: I1124 01:12:59.187510 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:12:59 crc kubenswrapper[4755]: I1124 01:12:59.187542 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:12:59 crc kubenswrapper[4755]: I1124 01:12:59.187551 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:12:59 crc kubenswrapper[4755]: I1124 01:12:59.187579 4755 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 24 01:12:59 crc kubenswrapper[4755]: E1124 01:12:59.188110 4755 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.103:6443: connect: connection refused" node="crc" Nov 24 01:12:59 crc kubenswrapper[4755]: W1124 01:12:59.272894 4755 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.103:6443: connect: connection refused Nov 24 01:12:59 crc kubenswrapper[4755]: E1124 01:12:59.273005 4755 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.103:6443: connect: connection refused" logger="UnhandledError" Nov 24 01:13:00 crc kubenswrapper[4755]: I1124 01:13:00.044323 4755 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba" exitCode=0 Nov 24 01:13:00 crc kubenswrapper[4755]: I1124 01:13:00.044435 4755 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 24 01:13:00 crc kubenswrapper[4755]: I1124 01:13:00.044461 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:13:00 crc kubenswrapper[4755]: I1124 01:13:00.044463 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:13:00 crc kubenswrapper[4755]: I1124 01:13:00.044528 4755 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 24 01:13:00 crc kubenswrapper[4755]: I1124 01:13:00.044569 4755 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba"} Nov 24 01:13:00 crc kubenswrapper[4755]: I1124 01:13:00.044631 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:13:00 crc kubenswrapper[4755]: I1124 01:13:00.044543 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:13:00 crc kubenswrapper[4755]: I1124 01:13:00.045711 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:00 crc kubenswrapper[4755]: I1124 01:13:00.045739 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:00 crc kubenswrapper[4755]: I1124 01:13:00.045749 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:00 crc kubenswrapper[4755]: I1124 01:13:00.046067 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:00 crc kubenswrapper[4755]: I1124 01:13:00.046114 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:00 crc kubenswrapper[4755]: I1124 01:13:00.046137 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:00 crc kubenswrapper[4755]: I1124 01:13:00.046441 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:00 crc kubenswrapper[4755]: I1124 01:13:00.046494 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:00 crc kubenswrapper[4755]: I1124 01:13:00.046512 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:00 crc kubenswrapper[4755]: I1124 01:13:00.046518 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:00 crc kubenswrapper[4755]: I1124 01:13:00.046528 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:00 crc kubenswrapper[4755]: I1124 01:13:00.046538 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:00 crc kubenswrapper[4755]: I1124 01:13:00.244382 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 01:13:00 crc kubenswrapper[4755]: I1124 01:13:00.318009 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 01:13:00 crc kubenswrapper[4755]: I1124 01:13:00.318355 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:13:00 crc kubenswrapper[4755]: I1124 01:13:00.319937 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:00 crc kubenswrapper[4755]: I1124 01:13:00.319997 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:00 crc kubenswrapper[4755]: I1124 
01:13:00.320015 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:00 crc kubenswrapper[4755]: I1124 01:13:00.326127 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 01:13:00 crc kubenswrapper[4755]: I1124 01:13:00.530559 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 01:13:00 crc kubenswrapper[4755]: I1124 01:13:00.893253 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 01:13:01 crc kubenswrapper[4755]: I1124 01:13:01.051069 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"f34675c12ebbb549e0db1190d1d99a23ccef6e4a97b884bcdafdc629204238c0"} Nov 24 01:13:01 crc kubenswrapper[4755]: I1124 01:13:01.051124 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"11946902b549b820749fb6127935c82cc1e840cd84834d1803ffc03003c967d0"} Nov 24 01:13:01 crc kubenswrapper[4755]: I1124 01:13:01.051142 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"7575abb359776b9b306d821357a1147cadfd41c99d9528e644a3c1fa046d1df1"} Nov 24 01:13:01 crc kubenswrapper[4755]: I1124 01:13:01.051157 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"64cdda2f5b59ddeadc36986c875cf28e18a7ed1ce1822dff6002724f7e558215"} Nov 24 01:13:01 crc kubenswrapper[4755]: I1124 01:13:01.051185 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:13:01 crc kubenswrapper[4755]: I1124 01:13:01.051244 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:13:01 crc kubenswrapper[4755]: I1124 01:13:01.052336 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:01 crc kubenswrapper[4755]: I1124 01:13:01.052382 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:01 crc kubenswrapper[4755]: I1124 01:13:01.052398 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:01 crc kubenswrapper[4755]: I1124 01:13:01.052426 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:01 crc kubenswrapper[4755]: I1124 01:13:01.052454 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:01 crc kubenswrapper[4755]: I1124 01:13:01.052463 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:02 crc kubenswrapper[4755]: I1124 01:13:02.059942 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"25ec710b1a2a3c639ced06a73586f11fbc050671bc059fdcaf49714f153445d1"} 
Nov 24 01:13:02 crc kubenswrapper[4755]: I1124 01:13:02.060078 4755 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 24 01:13:02 crc kubenswrapper[4755]: I1124 01:13:02.060718 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:13:02 crc kubenswrapper[4755]: I1124 01:13:02.060311 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:13:02 crc kubenswrapper[4755]: I1124 01:13:02.060194 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:13:02 crc kubenswrapper[4755]: I1124 01:13:02.062292 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:02 crc kubenswrapper[4755]: I1124 01:13:02.062358 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:02 crc kubenswrapper[4755]: I1124 01:13:02.062378 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:02 crc kubenswrapper[4755]: I1124 01:13:02.062755 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:02 crc kubenswrapper[4755]: I1124 01:13:02.062818 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:02 crc kubenswrapper[4755]: I1124 01:13:02.062844 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:02 crc kubenswrapper[4755]: I1124 01:13:02.062943 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:02 crc kubenswrapper[4755]: I1124 01:13:02.062999 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:02 crc kubenswrapper[4755]: I1124 01:13:02.063019 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:02 crc kubenswrapper[4755]: I1124 01:13:02.098075 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 01:13:02 crc kubenswrapper[4755]: I1124 01:13:02.388298 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:13:02 crc kubenswrapper[4755]: I1124 01:13:02.389978 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:02 crc kubenswrapper[4755]: I1124 01:13:02.390033 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:02 crc kubenswrapper[4755]: I1124 01:13:02.390046 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:02 crc kubenswrapper[4755]: I1124 01:13:02.390078 4755 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 24 01:13:03 crc kubenswrapper[4755]: I1124 01:13:03.062420 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:13:03 crc kubenswrapper[4755]: I1124 01:13:03.062431 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:13:03 crc 
kubenswrapper[4755]: I1124 01:13:03.064497 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:03 crc kubenswrapper[4755]: I1124 01:13:03.064570 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:03 crc kubenswrapper[4755]: I1124 01:13:03.064579 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:03 crc kubenswrapper[4755]: I1124 01:13:03.064641 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:03 crc kubenswrapper[4755]: I1124 01:13:03.064653 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:03 crc kubenswrapper[4755]: I1124 01:13:03.064710 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:03 crc kubenswrapper[4755]: I1124 01:13:03.684668 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 01:13:03 crc kubenswrapper[4755]: I1124 01:13:03.684956 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:13:03 crc kubenswrapper[4755]: I1124 01:13:03.687336 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:03 crc kubenswrapper[4755]: I1124 01:13:03.687376 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:03 crc kubenswrapper[4755]: I1124 01:13:03.687385 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:04 crc kubenswrapper[4755]: I1124 01:13:04.376531 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Nov 24 01:13:04 crc kubenswrapper[4755]: I1124 01:13:04.376956 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:13:04 crc kubenswrapper[4755]: I1124 01:13:04.379461 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:04 crc kubenswrapper[4755]: I1124 01:13:04.379531 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:04 crc kubenswrapper[4755]: I1124 01:13:04.379549 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:06 crc kubenswrapper[4755]: E1124 01:13:06.083997 4755 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Nov 24 01:13:06 crc kubenswrapper[4755]: I1124 01:13:06.132902 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 01:13:06 crc kubenswrapper[4755]: I1124 01:13:06.133100 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:13:06 crc kubenswrapper[4755]: I1124 01:13:06.134539 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:06 crc kubenswrapper[4755]: I1124 01:13:06.134645 4755 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:06 crc kubenswrapper[4755]: I1124 01:13:06.134722 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:06 crc kubenswrapper[4755]: I1124 01:13:06.187138 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 24 01:13:06 crc kubenswrapper[4755]: I1124 01:13:06.187458 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:13:06 crc kubenswrapper[4755]: I1124 01:13:06.189453 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:06 crc kubenswrapper[4755]: I1124 01:13:06.189527 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:06 crc kubenswrapper[4755]: I1124 01:13:06.189547 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:08 crc kubenswrapper[4755]: I1124 01:13:08.391053 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Nov 24 01:13:08 crc kubenswrapper[4755]: I1124 01:13:08.391264 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:13:08 crc kubenswrapper[4755]: I1124 01:13:08.392586 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:08 crc kubenswrapper[4755]: I1124 01:13:08.392645 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:08 crc kubenswrapper[4755]: I1124 01:13:08.392657 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:09 crc kubenswrapper[4755]: I1124 01:13:09.133878 4755 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Nov 24 01:13:09 crc kubenswrapper[4755]: I1124 01:13:09.133996 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 24 01:13:09 crc kubenswrapper[4755]: W1124 01:13:09.840975 4755 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout Nov 24 01:13:09 crc kubenswrapper[4755]: I1124 01:13:09.841160 4755 trace.go:236] Trace[749224927]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (24-Nov-2025 01:12:59.839) (total time: 10001ms): Nov 24 01:13:09 crc kubenswrapper[4755]: Trace[749224927]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake 
timeout 10001ms (01:13:09.840) Nov 24 01:13:09 crc kubenswrapper[4755]: Trace[749224927]: [10.001887376s] [10.001887376s] END Nov 24 01:13:09 crc kubenswrapper[4755]: E1124 01:13:09.841205 4755 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Nov 24 01:13:09 crc kubenswrapper[4755]: I1124 01:13:09.936258 4755 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Nov 24 01:13:10 crc kubenswrapper[4755]: W1124 01:13:10.068685 4755 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout Nov 24 01:13:10 crc kubenswrapper[4755]: I1124 01:13:10.068782 4755 trace.go:236] Trace[748427604]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (24-Nov-2025 01:13:00.066) (total time: 10002ms): Nov 24 01:13:10 crc kubenswrapper[4755]: Trace[748427604]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (01:13:10.068) Nov 24 01:13:10 crc kubenswrapper[4755]: Trace[748427604]: [10.002065011s] [10.002065011s] END Nov 24 01:13:10 crc kubenswrapper[4755]: E1124 01:13:10.068814 4755 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Nov 24 01:13:10 crc kubenswrapper[4755]: I1124 01:13:10.173293 4755 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Nov 24 01:13:10 crc kubenswrapper[4755]: I1124 01:13:10.173376 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Nov 24 01:13:10 crc kubenswrapper[4755]: I1124 01:13:10.181693 4755 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Nov 24 01:13:10 crc kubenswrapper[4755]: I1124 01:13:10.181744 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with 
statuscode: 403" Nov 24 01:13:10 crc kubenswrapper[4755]: I1124 01:13:10.249429 4755 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Nov 24 01:13:10 crc kubenswrapper[4755]: [+]log ok Nov 24 01:13:10 crc kubenswrapper[4755]: [+]etcd ok Nov 24 01:13:10 crc kubenswrapper[4755]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Nov 24 01:13:10 crc kubenswrapper[4755]: [+]poststarthook/openshift.io-api-request-count-filter ok Nov 24 01:13:10 crc kubenswrapper[4755]: [+]poststarthook/openshift.io-startkubeinformers ok Nov 24 01:13:10 crc kubenswrapper[4755]: [+]poststarthook/openshift.io-openshift-apiserver-reachable ok Nov 24 01:13:10 crc kubenswrapper[4755]: [+]poststarthook/openshift.io-oauth-apiserver-reachable ok Nov 24 01:13:10 crc kubenswrapper[4755]: [+]poststarthook/start-apiserver-admission-initializer ok Nov 24 01:13:10 crc kubenswrapper[4755]: [+]poststarthook/generic-apiserver-start-informers ok Nov 24 01:13:10 crc kubenswrapper[4755]: [+]poststarthook/priority-and-fairness-config-consumer ok Nov 24 01:13:10 crc kubenswrapper[4755]: [+]poststarthook/priority-and-fairness-filter ok Nov 24 01:13:10 crc kubenswrapper[4755]: [+]poststarthook/storage-object-count-tracker-hook ok Nov 24 01:13:10 crc kubenswrapper[4755]: [+]poststarthook/start-apiextensions-informers ok Nov 24 01:13:10 crc kubenswrapper[4755]: [-]poststarthook/start-apiextensions-controllers failed: reason withheld Nov 24 01:13:10 crc kubenswrapper[4755]: [-]poststarthook/crd-informer-synced failed: reason withheld Nov 24 01:13:10 crc kubenswrapper[4755]: [+]poststarthook/start-system-namespaces-controller ok Nov 24 01:13:10 crc kubenswrapper[4755]: [+]poststarthook/start-cluster-authentication-info-controller ok Nov 24 01:13:10 crc kubenswrapper[4755]: [+]poststarthook/start-kube-apiserver-identity-lease-controller ok Nov 24 01:13:10 crc kubenswrapper[4755]: [+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok Nov 24 01:13:10 crc kubenswrapper[4755]: [+]poststarthook/start-legacy-token-tracking-controller ok Nov 24 01:13:10 crc kubenswrapper[4755]: [+]poststarthook/start-service-ip-repair-controllers ok Nov 24 01:13:10 crc kubenswrapper[4755]: [-]poststarthook/rbac/bootstrap-roles failed: reason withheld Nov 24 01:13:10 crc kubenswrapper[4755]: [-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld Nov 24 01:13:10 crc kubenswrapper[4755]: [+]poststarthook/priority-and-fairness-config-producer ok Nov 24 01:13:10 crc kubenswrapper[4755]: [+]poststarthook/bootstrap-controller ok Nov 24 01:13:10 crc kubenswrapper[4755]: [+]poststarthook/aggregator-reload-proxy-client-cert ok Nov 24 01:13:10 crc kubenswrapper[4755]: [+]poststarthook/start-kube-aggregator-informers ok Nov 24 01:13:10 crc kubenswrapper[4755]: [+]poststarthook/apiservice-status-local-available-controller ok Nov 24 01:13:10 crc kubenswrapper[4755]: [+]poststarthook/apiservice-status-remote-available-controller ok Nov 24 01:13:10 crc kubenswrapper[4755]: [-]poststarthook/apiservice-registration-controller failed: reason withheld Nov 24 01:13:10 crc kubenswrapper[4755]: [+]poststarthook/apiservice-wait-for-first-sync ok Nov 24 01:13:10 crc kubenswrapper[4755]: [-]poststarthook/apiservice-discovery-controller failed: reason withheld Nov 24 01:13:10 crc kubenswrapper[4755]: [+]poststarthook/kube-apiserver-autoregistration ok Nov 24 01:13:10 
crc kubenswrapper[4755]: [+]autoregister-completion ok Nov 24 01:13:10 crc kubenswrapper[4755]: [+]poststarthook/apiservice-openapi-controller ok Nov 24 01:13:10 crc kubenswrapper[4755]: [+]poststarthook/apiservice-openapiv3-controller ok Nov 24 01:13:10 crc kubenswrapper[4755]: livez check failed Nov 24 01:13:10 crc kubenswrapper[4755]: I1124 01:13:10.249489 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 24 01:13:10 crc kubenswrapper[4755]: I1124 01:13:10.531724 4755 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Nov 24 01:13:10 crc kubenswrapper[4755]: I1124 01:13:10.531807 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Nov 24 01:13:11 crc kubenswrapper[4755]: I1124 01:13:11.091532 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Nov 24 01:13:11 crc kubenswrapper[4755]: I1124 01:13:11.094279 4755 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="e4053bdcbd61691699f85f36a3a24d3dffc18bc70e0fcfb2e0a43d9574469b0f" exitCode=255 Nov 24 01:13:11 crc kubenswrapper[4755]: I1124 01:13:11.094329 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"e4053bdcbd61691699f85f36a3a24d3dffc18bc70e0fcfb2e0a43d9574469b0f"} Nov 24 01:13:11 crc kubenswrapper[4755]: I1124 01:13:11.094505 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:13:11 crc kubenswrapper[4755]: I1124 01:13:11.095435 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:11 crc kubenswrapper[4755]: I1124 01:13:11.095481 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:11 crc kubenswrapper[4755]: I1124 01:13:11.095491 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:11 crc kubenswrapper[4755]: I1124 01:13:11.096008 4755 scope.go:117] "RemoveContainer" containerID="e4053bdcbd61691699f85f36a3a24d3dffc18bc70e0fcfb2e0a43d9574469b0f" Nov 24 01:13:12 crc kubenswrapper[4755]: I1124 01:13:12.097775 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Nov 24 01:13:12 crc kubenswrapper[4755]: I1124 01:13:12.099002 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Nov 24 01:13:12 crc 
kubenswrapper[4755]: I1124 01:13:12.100355 4755 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a" exitCode=255 Nov 24 01:13:12 crc kubenswrapper[4755]: I1124 01:13:12.100409 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a"} Nov 24 01:13:12 crc kubenswrapper[4755]: I1124 01:13:12.100487 4755 scope.go:117] "RemoveContainer" containerID="e4053bdcbd61691699f85f36a3a24d3dffc18bc70e0fcfb2e0a43d9574469b0f" Nov 24 01:13:12 crc kubenswrapper[4755]: I1124 01:13:12.100648 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:13:12 crc kubenswrapper[4755]: I1124 01:13:12.101884 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:12 crc kubenswrapper[4755]: I1124 01:13:12.101913 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:12 crc kubenswrapper[4755]: I1124 01:13:12.101924 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:12 crc kubenswrapper[4755]: I1124 01:13:12.103225 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 01:13:12 crc kubenswrapper[4755]: I1124 01:13:12.103450 4755 scope.go:117] "RemoveContainer" containerID="464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a" Nov 24 01:13:12 crc kubenswrapper[4755]: I1124 01:13:12.103474 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 01:13:12 crc kubenswrapper[4755]: E1124 01:13:12.104186 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Nov 24 01:13:12 crc kubenswrapper[4755]: I1124 01:13:12.107952 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:12 crc kubenswrapper[4755]: I1124 01:13:12.107983 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:12 crc kubenswrapper[4755]: I1124 01:13:12.107995 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:13 crc kubenswrapper[4755]: I1124 01:13:13.103437 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.012926 4755 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Nov 24 01:13:15 crc kubenswrapper[4755]: E1124 01:13:15.156476 4755 controller.go:145] "Failed to ensure lease exists, will retry" err="Get 
\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.157915 4755 trace.go:236] Trace[730103515]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (24-Nov-2025 01:13:03.429) (total time: 11728ms): Nov 24 01:13:15 crc kubenswrapper[4755]: Trace[730103515]: ---"Objects listed" error: 11728ms (01:13:15.157) Nov 24 01:13:15 crc kubenswrapper[4755]: Trace[730103515]: [11.728467485s] [11.728467485s] END Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.157945 4755 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.160411 4755 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.162106 4755 trace.go:236] Trace[784533020]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (24-Nov-2025 01:13:04.468) (total time: 10693ms): Nov 24 01:13:15 crc kubenswrapper[4755]: Trace[784533020]: ---"Objects listed" error: 10693ms (01:13:15.161) Nov 24 01:13:15 crc kubenswrapper[4755]: Trace[784533020]: [10.693882943s] [10.693882943s] END Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.162319 4755 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.164750 4755 kubelet_node_status.go:115] "Node was previously registered" node="crc" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.165080 4755 kubelet_node_status.go:79] "Successfully registered node" node="crc" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.166305 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.166346 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.166358 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.166380 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.166391 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:15Z","lastTransitionTime":"2025-11-24T01:13:15Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]"} Nov 24 01:13:15 crc kubenswrapper[4755]: E1124 01:13:15.176510 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"si
zeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v
4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"50bb41a3-bb20-461c-ba4c-72998ece87bc\\\",\\\"systemUUID\\\":\\\"cb6dbfa2-ee9a-4406-af65-1558b4c6cb25\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.181711 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.181875 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 
01:13:15.181987 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.182090 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.182184 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:15Z","lastTransitionTime":"2025-11-24T01:13:15Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Nov 24 01:13:15 crc kubenswrapper[4755]: E1124 01:13:15.191349 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"si
zeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v
4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"50bb41a3-bb20-461c-ba4c-72998ece87bc\\\",\\\"systemUUID\\\":\\\"cb6dbfa2-ee9a-4406-af65-1558b4c6cb25\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.194570 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.194643 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 
01:13:15.194656 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.194681 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.194695 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:15Z","lastTransitionTime":"2025-11-24T01:13:15Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Nov 24 01:13:15 crc kubenswrapper[4755]: E1124 01:13:15.207445 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"si
zeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v
4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"50bb41a3-bb20-461c-ba4c-72998ece87bc\\\",\\\"systemUUID\\\":\\\"cb6dbfa2-ee9a-4406-af65-1558b4c6cb25\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.212977 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.213036 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 
01:13:15.213048 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.213069 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.213081 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:15Z","lastTransitionTime":"2025-11-24T01:13:15Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Nov 24 01:13:15 crc kubenswrapper[4755]: E1124 01:13:15.225293 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"si
zeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v
4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"50bb41a3-bb20-461c-ba4c-72998ece87bc\\\",\\\"systemUUID\\\":\\\"cb6dbfa2-ee9a-4406-af65-1558b4c6cb25\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.229617 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.229653 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 
01:13:15.229664 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.229684 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.229694 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:15Z","lastTransitionTime":"2025-11-24T01:13:15Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Nov 24 01:13:15 crc kubenswrapper[4755]: E1124 01:13:15.239151 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"si
zeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v
4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"50bb41a3-bb20-461c-ba4c-72998ece87bc\\\",\\\"systemUUID\\\":\\\"cb6dbfa2-ee9a-4406-af65-1558b4c6cb25\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:15 crc kubenswrapper[4755]: E1124 01:13:15.239296 4755 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.240721 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 
01:13:15.240757 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.240767 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.240785 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.240795 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:15Z","lastTransitionTime":"2025-11-24T01:13:15Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.249291 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.252908 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.343792 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.343840 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.343854 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.343876 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.343889 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:15Z","lastTransitionTime":"2025-11-24T01:13:15Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]"} Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.400245 4755 scope.go:117] "RemoveContainer" containerID="464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a" Nov 24 01:13:15 crc kubenswrapper[4755]: E1124 01:13:15.400691 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.446161 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.446206 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.446220 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.446240 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.446251 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:15Z","lastTransitionTime":"2025-11-24T01:13:15Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.548444 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.548496 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.548505 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.548522 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.548531 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:15Z","lastTransitionTime":"2025-11-24T01:13:15Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]"} Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.650815 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.650895 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.650918 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.650964 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.650977 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:15Z","lastTransitionTime":"2025-11-24T01:13:15Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.704696 4755 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.753823 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.753868 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.753879 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.753898 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.753910 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:15Z","lastTransitionTime":"2025-11-24T01:13:15Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]"} Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.855966 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.856004 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.856012 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.856029 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.856041 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:15Z","lastTransitionTime":"2025-11-24T01:13:15Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.931663 4755 apiserver.go:52] "Watching apiserver" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.934847 4755 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.935570 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-network-diagnostics/network-check-target-xd92c","openshift-dns/node-resolver-dt8lz","openshift-machine-config-operator/machine-config-daemon-h8xzm","openshift-multus/multus-additional-cni-plugins-zb6qq","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf"] Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.936135 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.936238 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.936249 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:13:15 crc kubenswrapper[4755]: E1124 01:13:15.936385 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.936501 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.937061 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.937063 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 24 01:13:15 crc kubenswrapper[4755]: E1124 01:13:15.937138 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.937154 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-dt8lz" Nov 24 01:13:15 crc kubenswrapper[4755]: E1124 01:13:15.937392 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.937653 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.937671 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.940519 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.940573 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.940757 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.940885 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.940921 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.942080 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.942297 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.942326 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.942429 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.942503 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.942719 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.942969 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.943666 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.943993 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.944273 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.944437 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.944774 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.945207 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.945465 4755 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-multus"/"kube-root-ca.crt" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.945573 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.946654 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.947088 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.956947 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.958192 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.958226 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.958237 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.958253 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.958265 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:15Z","lastTransitionTime":"2025-11-24T01:13:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.964398 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.964437 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.964484 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.964508 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.964533 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.964555 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.964578 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.964600 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.965031 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" 
(UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.965315 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.965972 4755 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.966384 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.970986 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.975117 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.977039 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be 
located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:15 crc kubenswrapper[4755]: E1124 01:13:15.978838 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 01:13:15 crc kubenswrapper[4755]: E1124 01:13:15.978875 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 24 01:13:15 crc kubenswrapper[4755]: E1124 01:13:15.978893 4755 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 01:13:15 crc kubenswrapper[4755]: E1124 01:13:15.978951 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-24 01:13:16.478933552 +0000 UTC m=+21.164999053 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 01:13:15 crc kubenswrapper[4755]: E1124 01:13:15.989365 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 01:13:15 crc kubenswrapper[4755]: E1124 01:13:15.989398 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 24 01:13:15 crc kubenswrapper[4755]: E1124 01:13:15.989411 4755 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 01:13:15 crc kubenswrapper[4755]: E1124 01:13:15.989471 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-24 01:13:16.489456069 +0000 UTC m=+21.175521570 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.989689 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.990112 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 24 01:13:15 crc kubenswrapper[4755]: I1124 01:13:15.999481 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.006685 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.021921 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1962128-02a0-46c3-82c2-5055c2aed0b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h8xzm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.036879 4755 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.039022 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27983572-2d9c-43d6-a7f0-445a0aec0531\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"
/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zb6qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.060912 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.060946 4755 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.060956 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.060971 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.061009 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:16Z","lastTransitionTime":"2025-11-24T01:13:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.063045 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.065402 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.065447 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.065473 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.065496 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.065519 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.065543 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.065563 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.065590 4755 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.065646 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.065669 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.065690 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.065714 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.065737 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.065760 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.065782 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.065807 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.065832 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 24 01:13:16 crc 
kubenswrapper[4755]: I1124 01:13:16.065855 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.065849 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.065879 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.065903 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.065930 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.065956 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.065979 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.066000 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.066024 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.066046 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.066075 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.066100 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.066124 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.066147 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.066170 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.066224 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.066249 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.066277 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.066302 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.066330 4755 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.066353 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.066379 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.066401 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.066426 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.066449 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.066473 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.066493 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.066513 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.066536 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.066557 4755 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.066578 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.066621 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.066642 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.066663 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.066683 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.066705 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.066727 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.066748 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.066774 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Nov 24 
01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.066799 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.066821 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.066845 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.066867 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.065862 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.066045 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.066404 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.067158 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.066401 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.066511 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.066612 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.067188 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.066754 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.066797 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.066836 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.066948 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.067023 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.067038 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.067292 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.067307 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.067075 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.067109 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.067355 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.067404 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.067470 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.067496 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.067512 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.067536 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.067639 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.067729 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.067731 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.067747 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.067782 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.067804 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.067920 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.067936 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.067981 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.068005 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.068108 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.068151 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.068272 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.068475 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.068714 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.068779 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). 
InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.068848 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.069075 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.069404 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.070228 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.070221 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.070320 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.070329 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-4ngwk"] Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.070344 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.070475 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.070492 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.070630 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.070666 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.070735 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.070761 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.070815 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.070850 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.070897 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 
01:13:16.070930 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.070959 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.070983 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.071014 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.071041 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.071067 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.071075 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.071091 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.071113 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.071134 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.071160 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.071200 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.071386 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.071394 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-8pm69"] Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.071454 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.071664 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.071688 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). 
InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.071693 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.071908 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.071920 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.072287 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.072379 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.072474 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.073000 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.073271 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.073483 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.073759 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.074681 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.074728 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.074818 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.074845 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.074881 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.074905 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.074929 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.074952 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.075029 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.075284 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.075300 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.075338 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.075363 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.075370 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.075395 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.075419 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.075393 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.075442 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 24 01:13:16 crc kubenswrapper[4755]: E1124 01:13:16.075480 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:13:16.575461573 +0000 UTC m=+21.261527074 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.075784 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.075526 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.075789 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.075845 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.075868 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.075887 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.075904 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.075924 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.075975 4755 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.075997 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076017 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076036 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076057 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076077 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076093 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076112 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076132 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076153 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Nov 24 01:13:16 crc 
kubenswrapper[4755]: I1124 01:13:16.076176 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076198 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076222 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076243 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076263 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076283 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076305 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076324 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076345 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076364 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: 
\"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076385 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076408 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076427 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076446 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076466 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076486 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076507 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076527 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076549 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076568 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" 
(UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076621 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076647 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076665 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076684 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076702 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076727 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076747 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076765 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076786 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076807 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: 
\"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076856 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076874 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076893 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076914 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076932 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076951 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076970 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076988 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077009 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077026 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: 
\"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077050 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077071 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077088 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077106 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077124 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077143 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077160 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077177 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077196 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077216 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077236 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077258 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077279 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077300 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077322 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077341 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077361 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077381 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077400 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077426 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077385 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dt8lz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58ea3cf6-1f18-428e-9b3f-6064671faf72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fc2wq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dt8lz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077445 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077469 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077491 4755 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077509 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077528 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077546 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077567 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077586 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077643 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077663 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077685 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077709 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Nov 24 01:13:16 crc 
kubenswrapper[4755]: I1124 01:13:16.077730 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077750 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077769 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077788 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077808 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077828 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077848 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077868 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077888 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077905 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 24 
01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077922 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077940 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077957 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077975 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077994 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.078017 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.078041 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.078060 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.078080 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.078100 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod 
\"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.078118 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.078140 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.078204 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.078237 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/58ea3cf6-1f18-428e-9b3f-6064671faf72-hosts-file\") pod \"node-resolver-dt8lz\" (UID: \"58ea3cf6-1f18-428e-9b3f-6064671faf72\") " pod="openshift-dns/node-resolver-dt8lz" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.078257 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/27983572-2d9c-43d6-a7f0-445a0aec0531-system-cni-dir\") pod \"multus-additional-cni-plugins-zb6qq\" (UID: \"27983572-2d9c-43d6-a7f0-445a0aec0531\") " pod="openshift-multus/multus-additional-cni-plugins-zb6qq" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.078282 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/b1962128-02a0-46c3-82c2-5055c2aed0b9-proxy-tls\") pod \"machine-config-daemon-h8xzm\" (UID: \"b1962128-02a0-46c3-82c2-5055c2aed0b9\") " pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.078300 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b1962128-02a0-46c3-82c2-5055c2aed0b9-mcd-auth-proxy-config\") pod \"machine-config-daemon-h8xzm\" (UID: \"b1962128-02a0-46c3-82c2-5055c2aed0b9\") " pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.078333 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4hcgb\" (UniqueName: \"kubernetes.io/projected/b1962128-02a0-46c3-82c2-5055c2aed0b9-kube-api-access-4hcgb\") pod \"machine-config-daemon-h8xzm\" (UID: \"b1962128-02a0-46c3-82c2-5055c2aed0b9\") " pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.078352 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"os-release\" (UniqueName: \"kubernetes.io/host-path/27983572-2d9c-43d6-a7f0-445a0aec0531-os-release\") pod \"multus-additional-cni-plugins-zb6qq\" (UID: \"27983572-2d9c-43d6-a7f0-445a0aec0531\") " pod="openshift-multus/multus-additional-cni-plugins-zb6qq" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.078369 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mgwd7\" (UniqueName: \"kubernetes.io/projected/27983572-2d9c-43d6-a7f0-445a0aec0531-kube-api-access-mgwd7\") pod \"multus-additional-cni-plugins-zb6qq\" (UID: \"27983572-2d9c-43d6-a7f0-445a0aec0531\") " pod="openshift-multus/multus-additional-cni-plugins-zb6qq" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.078389 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.078432 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/27983572-2d9c-43d6-a7f0-445a0aec0531-cnibin\") pod \"multus-additional-cni-plugins-zb6qq\" (UID: \"27983572-2d9c-43d6-a7f0-445a0aec0531\") " pod="openshift-multus/multus-additional-cni-plugins-zb6qq" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.078451 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/27983572-2d9c-43d6-a7f0-445a0aec0531-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-zb6qq\" (UID: \"27983572-2d9c-43d6-a7f0-445a0aec0531\") " pod="openshift-multus/multus-additional-cni-plugins-zb6qq" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.078493 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.078518 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/b1962128-02a0-46c3-82c2-5055c2aed0b9-rootfs\") pod \"machine-config-daemon-h8xzm\" (UID: \"b1962128-02a0-46c3-82c2-5055c2aed0b9\") " pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.078538 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.078560 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: 
\"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.078579 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/27983572-2d9c-43d6-a7f0-445a0aec0531-tuning-conf-dir\") pod \"multus-additional-cni-plugins-zb6qq\" (UID: \"27983572-2d9c-43d6-a7f0-445a0aec0531\") " pod="openshift-multus/multus-additional-cni-plugins-zb6qq" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.078629 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.078648 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fc2wq\" (UniqueName: \"kubernetes.io/projected/58ea3cf6-1f18-428e-9b3f-6064671faf72-kube-api-access-fc2wq\") pod \"node-resolver-dt8lz\" (UID: \"58ea3cf6-1f18-428e-9b3f-6064671faf72\") " pod="openshift-dns/node-resolver-dt8lz" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.078696 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/27983572-2d9c-43d6-a7f0-445a0aec0531-cni-binary-copy\") pod \"multus-additional-cni-plugins-zb6qq\" (UID: \"27983572-2d9c-43d6-a7f0-445a0aec0531\") " pod="openshift-multus/multus-additional-cni-plugins-zb6qq" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.078769 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.078782 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.078793 4755 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.078804 4755 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.078816 4755 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.078827 4755 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.078840 4755 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.078852 4755 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.078863 4755 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.078874 4755 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.078885 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.078895 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.081642 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.081678 4755 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.081693 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.081706 4755 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.081729 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.081748 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.081760 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.081771 4755 reconciler_common.go:293] "Volume 
detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.081788 4755 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.081799 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.081809 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.081831 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.081841 4755 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.081852 4755 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.081862 4755 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.081878 4755 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.081889 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.081899 4755 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.081910 4755 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.081925 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.081935 4755 reconciler_common.go:293] "Volume detached for volume 
\"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.081945 4755 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.081955 4755 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.081969 4755 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.081979 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.081990 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.082005 4755 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.082015 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.082027 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.082036 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.075899 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.075921 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076183 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076321 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076330 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076357 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076423 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076649 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076698 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076783 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). 
InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076945 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.076967 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077154 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.083592 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077342 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077360 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077639 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077937 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.077987 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.078005 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.078488 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.078503 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.078781 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.082129 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.082343 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.078559 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.082388 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.082525 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.082741 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.082818 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.082997 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.083142 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.083345 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.083412 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.083726 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.083937 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.084116 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.084088 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.084238 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.084283 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). 
InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.084444 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.084460 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.084644 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.084670 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.084885 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.084849 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.084884 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.085184 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). 
InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.085194 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.085253 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.085271 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.085514 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.085553 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.085675 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.085705 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.085874 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.085875 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.086076 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.086320 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.086639 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.087016 4755 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.087048 4755 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.087072 4755 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.087088 4755 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.087105 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.087119 4755 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.087139 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.087153 4755 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.087166 4755 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.087183 4755 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.087199 4755 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.087213 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.087229 4755 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on 
node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.087248 4755 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.087261 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.087273 4755 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.087287 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.087303 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.087315 4755 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.087318 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.087328 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.087574 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.088994 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.089070 4755 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.089096 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.089107 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.089118 4755 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.089133 4755 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.089144 4755 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.089157 4755 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.089170 4755 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.089187 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.089445 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.089472 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.089492 4755 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.089514 4755 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: 
\"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: E1124 01:13:16.090968 4755 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.091011 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.091427 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.091426 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: E1124 01:13:16.091051 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-24 01:13:16.591031698 +0000 UTC m=+21.277097199 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.091659 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.091947 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.092260 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.092764 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.092915 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.093067 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.093101 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.093118 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.093024 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.093550 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.093625 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.093762 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.092345 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.093798 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.093855 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.094417 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.094467 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.093111 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). 
InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: E1124 01:13:16.094690 4755 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.094698 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.094819 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 24 01:13:16 crc kubenswrapper[4755]: E1124 01:13:16.094879 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-24 01:13:16.59485781 +0000 UTC m=+21.280923311 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.095436 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.095652 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.095919 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.096052 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.097500 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.097549 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.097560 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.097623 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.097985 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.098044 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.098200 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.098368 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.098393 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.098463 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.098641 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.098365 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.099116 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.099462 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.099903 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.100948 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.100993 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.101079 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.101188 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.101483 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.103629 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.104006 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.103997 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.104726 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.105003 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.105276 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.106010 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.107949 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"103d5a7c-0ec9-4c03-9f86-03dc3e01665c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40df6848b15d905bccead2547cc1dec836a69e38e80cb65de5dbee812f0081cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89d05f3773b4861d3bb33d7e6a6f09baaedb7510ed0b15703495110923a2f790\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78a49336f91d7c8aeb0d2861d1228d7b8eb478290bdef3b0662e26f9e1cddd57\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\
":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T01:13:12Z\\\",\\\"message\\\":\\\"GCM_SHA384' detected.\\\\nW1124 01:13:12.041119 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 01:13:12.041123 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 01:13:12.046149 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046144 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046262 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046271 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1124 01:13:12.046381 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1124 01:13:12.046396 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1124 01:13:12.046536 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\"\\\\nI1124 01:13:12.046556 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1763946791\\\\\\\\\\\\\\\" (2025-11-24 01:13:10 +0000 UTC to 2025-12-24 01:13:11 +0000 UTC (now=2025-11-24 01:13:12.046510889 +0000 UTC))\\\\\\\"\\\\nF1124 01:13:12.046649 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed 
container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcfcdeaa4c2ae4a6f4bf2d79a64607361b9798549e8910a7374e4e3d17a16ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.109415 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.109538 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.109730 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.110166 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.110231 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.113639 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.114942 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.116563 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.117199 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.117511 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: E1124 01:13:16.117547 4755 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-apiserver-crc\" already exists" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.117753 4755 scope.go:117] "RemoveContainer" containerID="464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a" Nov 24 01:13:16 crc kubenswrapper[4755]: E1124 01:13:16.118050 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.118059 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.121966 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.130234 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.130406 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.135862 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.137415 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.137855 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.141744 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.144323 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.145097 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.151788 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.160380 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.163493 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.163525 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.163537 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.163574 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.163590 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:16Z","lastTransitionTime":"2025-11-24T01:13:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.168728 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.190992 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/19dbf7ff-f684-4c57-803a-83b39e0705a4-host-var-lib-kubelet\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191049 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-run-ovn\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191077 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/27983572-2d9c-43d6-a7f0-445a0aec0531-cnibin\") pod \"multus-additional-cni-plugins-zb6qq\" (UID: \"27983572-2d9c-43d6-a7f0-445a0aec0531\") " pod="openshift-multus/multus-additional-cni-plugins-zb6qq" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191098 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/27983572-2d9c-43d6-a7f0-445a0aec0531-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-zb6qq\" (UID: 
\"27983572-2d9c-43d6-a7f0-445a0aec0531\") " pod="openshift-multus/multus-additional-cni-plugins-zb6qq" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191119 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/19dbf7ff-f684-4c57-803a-83b39e0705a4-host-var-lib-cni-bin\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191138 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/19dbf7ff-f684-4c57-803a-83b39e0705a4-hostroot\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191164 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-var-lib-openvswitch\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191186 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191208 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/19dbf7ff-f684-4c57-803a-83b39e0705a4-cnibin\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191226 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-host-kubelet\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191241 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-host-slash\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191254 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-run-systemd\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191270 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/b1962128-02a0-46c3-82c2-5055c2aed0b9-rootfs\") pod \"machine-config-daemon-h8xzm\" (UID: \"b1962128-02a0-46c3-82c2-5055c2aed0b9\") " 
pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191287 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-systemd-units\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191303 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191327 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-ovn-node-metrics-cert\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191350 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/19dbf7ff-f684-4c57-803a-83b39e0705a4-cni-binary-copy\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191365 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/19dbf7ff-f684-4c57-803a-83b39e0705a4-etc-kubernetes\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191380 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-run-openvswitch\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191394 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-host-run-ovn-kubernetes\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191412 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/58ea3cf6-1f18-428e-9b3f-6064671faf72-hosts-file\") pod \"node-resolver-dt8lz\" (UID: \"58ea3cf6-1f18-428e-9b3f-6064671faf72\") " pod="openshift-dns/node-resolver-dt8lz" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191427 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: 
\"kubernetes.io/host-path/27983572-2d9c-43d6-a7f0-445a0aec0531-system-cni-dir\") pod \"multus-additional-cni-plugins-zb6qq\" (UID: \"27983572-2d9c-43d6-a7f0-445a0aec0531\") " pod="openshift-multus/multus-additional-cni-plugins-zb6qq" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191448 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/19dbf7ff-f684-4c57-803a-83b39e0705a4-multus-cni-dir\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191462 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/19dbf7ff-f684-4c57-803a-83b39e0705a4-multus-daemon-config\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191476 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-host-cni-bin\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191492 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-host-cni-netd\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191514 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b1962128-02a0-46c3-82c2-5055c2aed0b9-mcd-auth-proxy-config\") pod \"machine-config-daemon-h8xzm\" (UID: \"b1962128-02a0-46c3-82c2-5055c2aed0b9\") " pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191534 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/19dbf7ff-f684-4c57-803a-83b39e0705a4-system-cni-dir\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191549 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6k7s\" (UniqueName: \"kubernetes.io/projected/19dbf7ff-f684-4c57-803a-83b39e0705a4-kube-api-access-c6k7s\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191564 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-node-log\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191579 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-4hcgb\" (UniqueName: \"kubernetes.io/projected/b1962128-02a0-46c3-82c2-5055c2aed0b9-kube-api-access-4hcgb\") pod \"machine-config-daemon-h8xzm\" (UID: \"b1962128-02a0-46c3-82c2-5055c2aed0b9\") " pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191595 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/27983572-2d9c-43d6-a7f0-445a0aec0531-os-release\") pod \"multus-additional-cni-plugins-zb6qq\" (UID: \"27983572-2d9c-43d6-a7f0-445a0aec0531\") " pod="openshift-multus/multus-additional-cni-plugins-zb6qq" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191643 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mgwd7\" (UniqueName: \"kubernetes.io/projected/27983572-2d9c-43d6-a7f0-445a0aec0531-kube-api-access-mgwd7\") pod \"multus-additional-cni-plugins-zb6qq\" (UID: \"27983572-2d9c-43d6-a7f0-445a0aec0531\") " pod="openshift-multus/multus-additional-cni-plugins-zb6qq" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191673 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/19dbf7ff-f684-4c57-803a-83b39e0705a4-multus-conf-dir\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191688 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/19dbf7ff-f684-4c57-803a-83b39e0705a4-host-run-multus-certs\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191711 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/19dbf7ff-f684-4c57-803a-83b39e0705a4-os-release\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191724 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-host-run-netns\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191745 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/19dbf7ff-f684-4c57-803a-83b39e0705a4-host-var-lib-cni-multus\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191760 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/19dbf7ff-f684-4c57-803a-83b39e0705a4-multus-socket-dir-parent\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191779 4755 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/27983572-2d9c-43d6-a7f0-445a0aec0531-tuning-conf-dir\") pod \"multus-additional-cni-plugins-zb6qq\" (UID: \"27983572-2d9c-43d6-a7f0-445a0aec0531\") " pod="openshift-multus/multus-additional-cni-plugins-zb6qq" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191794 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/19dbf7ff-f684-4c57-803a-83b39e0705a4-host-run-k8s-cni-cncf-io\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191810 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-log-socket\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191825 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-ovnkube-config\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191838 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-env-overrides\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191861 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fc2wq\" (UniqueName: \"kubernetes.io/projected/58ea3cf6-1f18-428e-9b3f-6064671faf72-kube-api-access-fc2wq\") pod \"node-resolver-dt8lz\" (UID: \"58ea3cf6-1f18-428e-9b3f-6064671faf72\") " pod="openshift-dns/node-resolver-dt8lz" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191877 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/27983572-2d9c-43d6-a7f0-445a0aec0531-cni-binary-copy\") pod \"multus-additional-cni-plugins-zb6qq\" (UID: \"27983572-2d9c-43d6-a7f0-445a0aec0531\") " pod="openshift-multus/multus-additional-cni-plugins-zb6qq" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191891 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-etc-openvswitch\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191906 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-ovnkube-script-lib\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191927 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/b1962128-02a0-46c3-82c2-5055c2aed0b9-proxy-tls\") pod \"machine-config-daemon-h8xzm\" (UID: \"b1962128-02a0-46c3-82c2-5055c2aed0b9\") " pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191942 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/19dbf7ff-f684-4c57-803a-83b39e0705a4-host-run-netns\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.191956 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5nhqm\" (UniqueName: \"kubernetes.io/projected/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-kube-api-access-5nhqm\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192010 4755 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192020 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192030 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192039 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192048 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192058 4755 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192068 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192081 4755 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192092 4755 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192102 4755 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192113 4755 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192123 4755 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192133 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192143 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192154 4755 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192165 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192174 4755 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192184 4755 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192194 4755 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192206 4755 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192217 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192228 4755 reconciler_common.go:293] "Volume detached for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192239 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192249 4755 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192261 4755 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192271 4755 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192281 4755 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192293 4755 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192303 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192313 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192323 4755 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192334 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192344 4755 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192355 4755 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192366 4755 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: 
\"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192376 4755 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192386 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192396 4755 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192407 4755 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192419 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192430 4755 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192441 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192453 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192463 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192474 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192484 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192495 4755 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192507 4755 reconciler_common.go:293] "Volume detached for volume 
\"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192517 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192527 4755 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192538 4755 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192549 4755 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192559 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192569 4755 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192581 4755 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192594 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192627 4755 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192641 4755 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192704 4755 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192761 4755 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.192832 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/27983572-2d9c-43d6-a7f0-445a0aec0531-cnibin\") pod \"multus-additional-cni-plugins-zb6qq\" (UID: \"27983572-2d9c-43d6-a7f0-445a0aec0531\") " pod="openshift-multus/multus-additional-cni-plugins-zb6qq" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.193404 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b1962128-02a0-46c3-82c2-5055c2aed0b9-mcd-auth-proxy-config\") pod \"machine-config-daemon-h8xzm\" (UID: \"b1962128-02a0-46c3-82c2-5055c2aed0b9\") " pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.193562 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/27983572-2d9c-43d6-a7f0-445a0aec0531-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-zb6qq\" (UID: \"27983572-2d9c-43d6-a7f0-445a0aec0531\") " pod="openshift-multus/multus-additional-cni-plugins-zb6qq" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.193621 4755 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.193671 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.193714 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/b1962128-02a0-46c3-82c2-5055c2aed0b9-rootfs\") pod \"machine-config-daemon-h8xzm\" (UID: \"b1962128-02a0-46c3-82c2-5055c2aed0b9\") " pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.193777 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/58ea3cf6-1f18-428e-9b3f-6064671faf72-hosts-file\") pod \"node-resolver-dt8lz\" (UID: \"58ea3cf6-1f18-428e-9b3f-6064671faf72\") " pod="openshift-dns/node-resolver-dt8lz" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.193805 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/27983572-2d9c-43d6-a7f0-445a0aec0531-system-cni-dir\") pod \"multus-additional-cni-plugins-zb6qq\" (UID: \"27983572-2d9c-43d6-a7f0-445a0aec0531\") " pod="openshift-multus/multus-additional-cni-plugins-zb6qq" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.193843 4755 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.193831 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.194032 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/27983572-2d9c-43d6-a7f0-445a0aec0531-os-release\") pod \"multus-additional-cni-plugins-zb6qq\" (UID: \"27983572-2d9c-43d6-a7f0-445a0aec0531\") " pod="openshift-multus/multus-additional-cni-plugins-zb6qq" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.194669 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/27983572-2d9c-43d6-a7f0-445a0aec0531-cni-binary-copy\") pod \"multus-additional-cni-plugins-zb6qq\" (UID: \"27983572-2d9c-43d6-a7f0-445a0aec0531\") " pod="openshift-multus/multus-additional-cni-plugins-zb6qq" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.194766 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/27983572-2d9c-43d6-a7f0-445a0aec0531-tuning-conf-dir\") pod \"multus-additional-cni-plugins-zb6qq\" (UID: \"27983572-2d9c-43d6-a7f0-445a0aec0531\") " pod="openshift-multus/multus-additional-cni-plugins-zb6qq" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.194945 4755 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.194962 4755 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.194976 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.194989 4755 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195001 4755 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195012 4755 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195023 4755 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195034 4755 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195048 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195059 4755 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195072 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195084 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195096 4755 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195107 4755 reconciler_common.go:293] "Volume 
detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195118 4755 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195129 4755 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195141 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195153 4755 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195165 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195175 4755 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195186 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195198 4755 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195209 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195220 4755 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195231 4755 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195244 4755 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195255 4755 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195265 4755 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195277 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195288 4755 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195299 4755 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195311 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195322 4755 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195333 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195344 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195354 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195365 4755 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195378 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195389 4755 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195400 4755 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195411 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195422 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195433 4755 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195444 4755 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195456 4755 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195467 4755 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195479 4755 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195491 4755 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195502 4755 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195513 4755 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195526 4755 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195539 4755 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195550 4755 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" 
(UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195561 4755 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195571 4755 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195583 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.195593 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.197480 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/b1962128-02a0-46c3-82c2-5055c2aed0b9-proxy-tls\") pod \"machine-config-daemon-h8xzm\" (UID: \"b1962128-02a0-46c3-82c2-5055c2aed0b9\") " pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.214361 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fc2wq\" (UniqueName: \"kubernetes.io/projected/58ea3cf6-1f18-428e-9b3f-6064671faf72-kube-api-access-fc2wq\") pod \"node-resolver-dt8lz\" (UID: \"58ea3cf6-1f18-428e-9b3f-6064671faf72\") " pod="openshift-dns/node-resolver-dt8lz" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.216086 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1962128-02a0-46c3-82c2-5055c2aed0b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h8xzm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.221978 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mgwd7\" (UniqueName: \"kubernetes.io/projected/27983572-2d9c-43d6-a7f0-445a0aec0531-kube-api-access-mgwd7\") pod \"multus-additional-cni-plugins-zb6qq\" (UID: \"27983572-2d9c-43d6-a7f0-445a0aec0531\") " pod="openshift-multus/multus-additional-cni-plugins-zb6qq" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.223103 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4hcgb\" (UniqueName: \"kubernetes.io/projected/b1962128-02a0-46c3-82c2-5055c2aed0b9-kube-api-access-4hcgb\") pod \"machine-config-daemon-h8xzm\" (UID: \"b1962128-02a0-46c3-82c2-5055c2aed0b9\") " pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.232137 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8pm69" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"19dbf7ff-f684-4c57-803a-83b39e0705a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6k7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8pm69\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.235844 4755 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.247239 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27983572-2d9c-43d6-a7f0-445a0aec0531\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zb6qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.247544 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.256972 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 24 01:13:16 crc kubenswrapper[4755]: W1124 01:13:16.260186 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-e5f0a67bfb925903bd7cfd251716748f9b526b9f8571ce969ea1c15e4ce83382 WatchSource:0}: Error finding container e5f0a67bfb925903bd7cfd251716748f9b526b9f8571ce969ea1c15e4ce83382: Status 404 returned error can't find the container with id e5f0a67bfb925903bd7cfd251716748f9b526b9f8571ce969ea1c15e4ce83382 Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.261075 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.262749 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-dt8lz" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.265421 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.265453 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.265462 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.265477 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.265487 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:16Z","lastTransitionTime":"2025-11-24T01:13:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.271471 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.271705 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.289749 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.295993 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-systemd-units\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.296101 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.296174 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-ovn-node-metrics-cert\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.296249 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/19dbf7ff-f684-4c57-803a-83b39e0705a4-cni-binary-copy\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.296312 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/19dbf7ff-f684-4c57-803a-83b39e0705a4-etc-kubernetes\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.296377 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-run-openvswitch\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.296437 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-host-run-ovn-kubernetes\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.296507 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/19dbf7ff-f684-4c57-803a-83b39e0705a4-multus-cni-dir\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.296570 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/19dbf7ff-f684-4c57-803a-83b39e0705a4-multus-daemon-config\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.296643 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-host-cni-bin\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.296704 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-host-cni-netd\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.296792 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/19dbf7ff-f684-4c57-803a-83b39e0705a4-system-cni-dir\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.296860 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6k7s\" (UniqueName: \"kubernetes.io/projected/19dbf7ff-f684-4c57-803a-83b39e0705a4-kube-api-access-c6k7s\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.296922 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-node-log\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.296990 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/19dbf7ff-f684-4c57-803a-83b39e0705a4-multus-conf-dir\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") 
" pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.297051 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/19dbf7ff-f684-4c57-803a-83b39e0705a4-host-run-multus-certs\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.297092 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-host-run-ovn-kubernetes\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.297117 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/19dbf7ff-f684-4c57-803a-83b39e0705a4-os-release\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.297195 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-host-run-netns\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.297246 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/19dbf7ff-f684-4c57-803a-83b39e0705a4-host-var-lib-cni-multus\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.297280 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/19dbf7ff-f684-4c57-803a-83b39e0705a4-multus-socket-dir-parent\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.297305 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/19dbf7ff-f684-4c57-803a-83b39e0705a4-host-run-k8s-cni-cncf-io\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.297328 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-log-socket\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.297352 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-ovnkube-config\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.297372 4755 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-env-overrides\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.297394 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-ovnkube-script-lib\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.297421 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-etc-openvswitch\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.297452 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/19dbf7ff-f684-4c57-803a-83b39e0705a4-host-run-netns\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.297467 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5nhqm\" (UniqueName: \"kubernetes.io/projected/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-kube-api-access-5nhqm\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.297484 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/19dbf7ff-f684-4c57-803a-83b39e0705a4-host-var-lib-kubelet\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.297500 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-run-ovn\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.297517 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/19dbf7ff-f684-4c57-803a-83b39e0705a4-host-var-lib-cni-bin\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.297533 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/19dbf7ff-f684-4c57-803a-83b39e0705a4-hostroot\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.297548 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-var-lib-openvswitch\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.297565 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/19dbf7ff-f684-4c57-803a-83b39e0705a4-cnibin\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.297581 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-host-kubelet\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.297617 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-host-slash\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.297639 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-run-systemd\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.297703 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-run-systemd\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.296000 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dt8lz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58ea3cf6-1f18-428e-9b3f-6064671faf72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fc2wq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dt8lz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.297901 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/19dbf7ff-f684-4c57-803a-83b39e0705a4-os-release\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.298061 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/19dbf7ff-f684-4c57-803a-83b39e0705a4-multus-cni-dir\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.298443 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/19dbf7ff-f684-4c57-803a-83b39e0705a4-cni-binary-copy\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.296173 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.298505 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/19dbf7ff-f684-4c57-803a-83b39e0705a4-etc-kubernetes\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.296144 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-systemd-units\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc 
kubenswrapper[4755]: I1124 01:13:16.298539 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-run-openvswitch\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.298559 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-host-run-netns\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.298580 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/19dbf7ff-f684-4c57-803a-83b39e0705a4-host-var-lib-cni-multus\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.298649 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/19dbf7ff-f684-4c57-803a-83b39e0705a4-multus-socket-dir-parent\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.298681 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/19dbf7ff-f684-4c57-803a-83b39e0705a4-host-run-k8s-cni-cncf-io\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.298703 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-log-socket\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.299259 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-ovnkube-config\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.299347 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/19dbf7ff-f684-4c57-803a-83b39e0705a4-multus-daemon-config\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.299441 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-host-cni-bin\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.299515 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: 
\"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-host-cni-netd\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.299596 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/19dbf7ff-f684-4c57-803a-83b39e0705a4-system-cni-dir\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.299653 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-env-overrides\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.299942 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-node-log\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.300038 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/19dbf7ff-f684-4c57-803a-83b39e0705a4-multus-conf-dir\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.300094 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-etc-openvswitch\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.300071 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-ovnkube-script-lib\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.300146 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/19dbf7ff-f684-4c57-803a-83b39e0705a4-host-run-netns\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.300226 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/19dbf7ff-f684-4c57-803a-83b39e0705a4-host-run-multus-certs\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.300364 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/19dbf7ff-f684-4c57-803a-83b39e0705a4-host-var-lib-kubelet\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc 
kubenswrapper[4755]: I1124 01:13:16.300404 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-run-ovn\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.300427 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/19dbf7ff-f684-4c57-803a-83b39e0705a4-host-var-lib-cni-bin\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.300445 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/19dbf7ff-f684-4c57-803a-83b39e0705a4-hostroot\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.300465 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-var-lib-openvswitch\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.300495 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/19dbf7ff-f684-4c57-803a-83b39e0705a4-cnibin\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.300516 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-host-kubelet\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.300538 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-host-slash\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: W1124 01:13:16.303273 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-d62e87fbfd10e7a6d7bc8a5143a6c9171ea45898c9d154139b389a2a62e315e3 WatchSource:0}: Error finding container d62e87fbfd10e7a6d7bc8a5143a6c9171ea45898c9d154139b389a2a62e315e3: Status 404 returned error can't find the container with id d62e87fbfd10e7a6d7bc8a5143a6c9171ea45898c9d154139b389a2a62e315e3 Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.304715 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-ovn-node-metrics-cert\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.310213 4755 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.316292 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5nhqm\" (UniqueName: \"kubernetes.io/projected/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-kube-api-access-5nhqm\") pod \"ovnkube-node-4ngwk\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.317857 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4ngwk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.321085 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c6k7s\" (UniqueName: \"kubernetes.io/projected/19dbf7ff-f684-4c57-803a-83b39e0705a4-kube-api-access-c6k7s\") pod \"multus-8pm69\" (UID: \"19dbf7ff-f684-4c57-803a-83b39e0705a4\") " pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.328908 4755 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"103d5a7c-0ec9-4c03-9f86-03dc3e01665c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40df6848b15d905bccead2547cc1dec836a69e38e80cb65de5dbee812f0081cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89d05f3773b4861d3bb33d7e6a6f09baaedb7510ed0b15703495110923a2f790\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78a49336f91d7c8aeb0d2861d1228d7b8eb478290bdef3b0662e26f9e1cddd57\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc
/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T01:13:12Z\\\",\\\"message\\\":\\\"GCM_SHA384' detected.\\\\nW1124 01:13:12.041119 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 01:13:12.041123 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 01:13:12.046149 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046144 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046262 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046271 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1124 01:13:12.046381 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1124 01:13:12.046396 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1124 01:13:12.046536 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\"\\\\nI1124 01:13:12.046556 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1763946791\\\\\\\\\\\\\\\" (2025-11-24 01:13:10 +0000 UTC to 2025-12-24 01:13:11 +0000 UTC (now=2025-11-24 01:13:12.046510889 +0000 UTC))\\\\\\\"\\\\nF1124 01:13:12.046649 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcfcdeaa4c2ae4a6f4bf2d79a64607361b9798549e8910a7374e4e3d17a16ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:16 crc kubenswrapper[4755]: W1124 01:13:16.329265 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod27983572_2d9c_43d6_a7f0_445a0aec0531.slice/crio-0de37823e7206d5ce234a54c98587ceea87ea9233efc9336b4709dad5a02a7c5 WatchSource:0}: Error finding container 0de37823e7206d5ce234a54c98587ceea87ea9233efc9336b4709dad5a02a7c5: Status 404 returned error can't find the container with id 0de37823e7206d5ce234a54c98587ceea87ea9233efc9336b4709dad5a02a7c5 Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.339419 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.349901 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:16 crc kubenswrapper[4755]: W1124 01:13:16.359851 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb1962128_02a0_46c3_82c2_5055c2aed0b9.slice/crio-9902a155bea6e70cb534daa3d0a8cf8ab9a86f18978489f32bc153bfc006a345 WatchSource:0}: Error finding container 9902a155bea6e70cb534daa3d0a8cf8ab9a86f18978489f32bc153bfc006a345: Status 404 returned error can't find the container with id 9902a155bea6e70cb534daa3d0a8cf8ab9a86f18978489f32bc153bfc006a345 Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.359936 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1962128-02a0-46c3-82c2-5055c2aed0b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h8xzm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.371154 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.371193 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.371204 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.371221 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.371233 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:16Z","lastTransitionTime":"2025-11-24T01:13:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.371866 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8pm69" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19dbf7ff-f684-4c57-803a-83b39e0705a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6k7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\
\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8pm69\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.382276 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69352793-3db7-450c-98c2-5fd2040bb5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c1c133fe773aac40181aea15bd49ad2aee2f08caed8f7436c81ae3fc310300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b22922ca0ffb3c9c410a6ed82822ce5b3436d0e44993abe1fcb678959f243808\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5845caccbc89d64a7aaabedeec11a5d75fb7f25eb806cf8dceaf15263f30b4f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube
-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a9cb7e9aa093857c03bb4267ff3386e04e4de6e4025fdbfcb628db800d62745\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.393119 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27983572-2d9c-43d6-a7f0-445a0aec0531\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zb6qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.403450 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.404345 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"103d5a7c-0ec9-4c03-9f86-03dc3e01665c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40df6848b15d905bccead2547cc1dec836a69e38e80cb65de5dbee812f0081cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89d05f3773b4861d3bb33d7e6a6f09baaedb7510ed0b15703495110923a2f790\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78a49336f91d7c8aeb0d2861d1228d7b8eb478290bdef3b0662e26f9e1cddd57\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-sync
er\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T01:13:12Z\\\",\\\"message\\\":\\\"GCM_SHA384' detected.\\\\nW1124 01:13:12.041119 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 01:13:12.041123 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 01:13:12.046149 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046144 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046262 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046271 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1124 01:13:12.046381 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1124 01:13:12.046396 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1124 01:13:12.046536 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\"\\\\nI1124 01:13:12.046556 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1763946791\\\\\\\\\\\\\\\" (2025-11-24 01:13:10 +0000 UTC to 2025-12-24 01:13:11 +0000 UTC (now=2025-11-24 01:13:12.046510889 +0000 UTC))\\\\\\\"\\\\nF1124 01:13:12.046649 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s 
restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcfcdeaa4c2ae4a6f4bf2d79a64607361b9798549e8910a7374e4e3d17a16ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.415389 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.417191 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-8pm69" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.424214 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:16 crc kubenswrapper[4755]: W1124 01:13:16.429067 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb3b1d3cb_ffbd_4034_832d_6577ccf2f780.slice/crio-7fb662d2864d7cf4a1f4f1c263683e549f040a12ab6ec7b20aeb38ea0616ec7c WatchSource:0}: Error finding container 7fb662d2864d7cf4a1f4f1c263683e549f040a12ab6ec7b20aeb38ea0616ec7c: Status 404 returned error can't find the container with id 7fb662d2864d7cf4a1f4f1c263683e549f040a12ab6ec7b20aeb38ea0616ec7c Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.436332 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dt8lz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58ea3cf6-1f18-428e-9b3f-6064671faf72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fc2wq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dt8lz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.451090 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4ngwk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.461498 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.480337 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.482108 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.482132 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.482143 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.482158 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.482169 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:16Z","lastTransitionTime":"2025-11-24T01:13:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.499503 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.499548 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:13:16 crc kubenswrapper[4755]: E1124 01:13:16.499708 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 01:13:16 crc kubenswrapper[4755]: E1124 01:13:16.499735 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 24 01:13:16 crc kubenswrapper[4755]: E1124 01:13:16.499749 4755 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 01:13:16 crc kubenswrapper[4755]: E1124 01:13:16.499796 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-24 01:13:17.499779308 +0000 UTC m=+22.185844809 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 01:13:16 crc kubenswrapper[4755]: E1124 01:13:16.500077 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 01:13:16 crc kubenswrapper[4755]: E1124 01:13:16.500097 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 24 01:13:16 crc kubenswrapper[4755]: E1124 01:13:16.500107 4755 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 01:13:16 crc kubenswrapper[4755]: E1124 01:13:16.500133 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-24 01:13:17.500123428 +0000 UTC m=+22.186188929 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.585884 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.586163 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.586172 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.586186 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.586198 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:16Z","lastTransitionTime":"2025-11-24T01:13:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.600452 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.600564 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.600619 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:13:16 crc kubenswrapper[4755]: E1124 01:13:16.600653 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:13:17.600596925 +0000 UTC m=+22.286662496 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:13:16 crc kubenswrapper[4755]: E1124 01:13:16.600815 4755 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 01:13:16 crc kubenswrapper[4755]: E1124 01:13:16.600854 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-24 01:13:17.600843802 +0000 UTC m=+22.286909403 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 01:13:16 crc kubenswrapper[4755]: E1124 01:13:16.600760 4755 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 01:13:16 crc kubenswrapper[4755]: E1124 01:13:16.601205 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-24 01:13:17.601194172 +0000 UTC m=+22.287259673 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.687990 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.688023 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.688034 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.688050 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.688063 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:16Z","lastTransitionTime":"2025-11-24T01:13:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.791054 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.791105 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.791118 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.791137 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.791148 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:16Z","lastTransitionTime":"2025-11-24T01:13:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.893008 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.893036 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.893044 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.893058 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.893067 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:16Z","lastTransitionTime":"2025-11-24T01:13:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.995039 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.995080 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.995094 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.995108 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.995117 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:16Z","lastTransitionTime":"2025-11-24T01:13:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:16 crc kubenswrapper[4755]: I1124 01:13:16.995657 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:13:16 crc kubenswrapper[4755]: E1124 01:13:16.995792 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.097256 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.097306 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.097318 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.097335 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.097350 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:17Z","lastTransitionTime":"2025-11-24T01:13:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.115430 4755 generic.go:334] "Generic (PLEG): container finished" podID="27983572-2d9c-43d6-a7f0-445a0aec0531" containerID="48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7" exitCode=0 Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.115506 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" event={"ID":"27983572-2d9c-43d6-a7f0-445a0aec0531","Type":"ContainerDied","Data":"48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7"} Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.115538 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" event={"ID":"27983572-2d9c-43d6-a7f0-445a0aec0531","Type":"ContainerStarted","Data":"0de37823e7206d5ce234a54c98587ceea87ea9233efc9336b4709dad5a02a7c5"} Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.118081 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-dt8lz" event={"ID":"58ea3cf6-1f18-428e-9b3f-6064671faf72","Type":"ContainerStarted","Data":"f63e4ef9d52542d28688b962a86087f6b95a40625eefb13ad0460e2e728f5048"} Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.118133 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-dt8lz" event={"ID":"58ea3cf6-1f18-428e-9b3f-6064671faf72","Type":"ContainerStarted","Data":"49d5e214b86c7ec9ba76910042d99bde26e90527549b32b72bb4d4804c7c326d"} Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.119562 4755 generic.go:334] "Generic (PLEG): container finished" podID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerID="7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d" exitCode=0 Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.119650 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" event={"ID":"b3b1d3cb-ffbd-4034-832d-6577ccf2f780","Type":"ContainerDied","Data":"7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d"} Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.119722 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" event={"ID":"b3b1d3cb-ffbd-4034-832d-6577ccf2f780","Type":"ContainerStarted","Data":"7fb662d2864d7cf4a1f4f1c263683e549f040a12ab6ec7b20aeb38ea0616ec7c"} Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.120322 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"d62e87fbfd10e7a6d7bc8a5143a6c9171ea45898c9d154139b389a2a62e315e3"} Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.122298 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"4407e6a61e6a16826e5087eda8f3f2904933c9fb540f0a3c03e5414188248657"} Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.122348 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"6bcaff046f33f0df74919d9673b7e12e0609ea03f7686c1806ffb3c351965195"} Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.122363 4755 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"29e71db3f492ff60dcd92cb2541687fa40bc850f43d0c8036a67d5322a7b852c"} Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.123419 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"311d898b91dec73c482063aca5a79fefbc55b705d41d4da5fbbc21d8972deac8"} Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.123445 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"e5f0a67bfb925903bd7cfd251716748f9b526b9f8571ce969ea1c15e4ce83382"} Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.124645 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-8pm69" event={"ID":"19dbf7ff-f684-4c57-803a-83b39e0705a4","Type":"ContainerStarted","Data":"090d3d275453f489254405c1197750888cc4d15d4251c6ce9b8a1a873319d5a4"} Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.124676 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-8pm69" event={"ID":"19dbf7ff-f684-4c57-803a-83b39e0705a4","Type":"ContainerStarted","Data":"d83429779a3c7abfc9ce6f2a1af6183ce03efa6d7e1a73e827d200cdb25bd24c"} Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.126410 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" event={"ID":"b1962128-02a0-46c3-82c2-5055c2aed0b9","Type":"ContainerStarted","Data":"a249831bc7bdc4396bd5498c176a52332946f791c39b1f90eea4c157e61fc60c"} Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.126448 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" event={"ID":"b1962128-02a0-46c3-82c2-5055c2aed0b9","Type":"ContainerStarted","Data":"16a2280b76aa63e33f859193da8353df1f8a4014ef51c0ef6350beb5fb217245"} Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.126471 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" event={"ID":"b1962128-02a0-46c3-82c2-5055c2aed0b9","Type":"ContainerStarted","Data":"9902a155bea6e70cb534daa3d0a8cf8ab9a86f18978489f32bc153bfc006a345"} Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.126836 4755 scope.go:117] "RemoveContainer" containerID="464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a" Nov 24 01:13:17 crc kubenswrapper[4755]: E1124 01:13:17.127016 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.132931 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.145839 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.159200 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.168150 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.177229 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1962128-02a0-46c3-82c2-5055c2aed0b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h8xzm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.188047 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8pm69" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19dbf7ff-f684-4c57-803a-83b39e0705a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6k7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8pm69\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.199790 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"69352793-3db7-450c-98c2-5fd2040bb5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c1c133fe773aac40181aea15bd49ad2aee2f08caed8f7436c81ae3fc310300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b22922ca0ffb3c9c410a6ed82822ce5b3436d0e44993abe1fcb678959f243808\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5845caccbc89d64a7aaabedeec11a5d75fb7f25eb806cf8dceaf15263f30b4f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a9cb7e9aa093857c03bb4267ff3386e04e4de6e4025fdbfcb628db800d62745\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.200395 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.200458 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.200468 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.200482 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.200492 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:17Z","lastTransitionTime":"2025-11-24T01:13:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.218548 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27983572-2d9c-43d6-a7f0-445a0aec0531\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets
/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\
\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zb6qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:17Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.234421 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:17Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.250898 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:17Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.262036 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dt8lz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58ea3cf6-1f18-428e-9b3f-6064671faf72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fc2wq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dt8lz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:17Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.280295 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled
\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-ove
rrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5n
hqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4ngwk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:17Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.294422 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"103d5a7c-0ec9-4c03-9f86-03dc3e01665c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40df6848b15d905bccead2547cc1dec836a69e38e80cb65de5dbee812f0081cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89d05f3773b4861d3bb33d7e6a6f09baaedb7510ed0b15703495110923a2f790\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78a49336f91d7c8aeb0d2861d1228d7b8eb478290bdef3b0662e26f9e1cddd57\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T01:13:12Z\\\",\\\"message\\\":\\\"GCM_SHA384' detected.\\\\nW1124 01:13:12.041119 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 01:13:12.041123 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 01:13:12.046149 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046144 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046262 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046271 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1124 01:13:12.046381 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1124 01:13:12.046396 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1124 01:13:12.046536 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\"\\\\nI1124 01:13:12.046556 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1763946791\\\\\\\\\\\\\\\" (2025-11-24 01:13:10 +0000 UTC to 2025-12-24 01:13:11 +0000 UTC (now=2025-11-24 01:13:12.046510889 +0000 UTC))\\\\\\\"\\\\nF1124 01:13:12.046649 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcfcdeaa4c2ae4a6f4bf2d79a64607361b9798549e8910a7374e4e3d17a16ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:17Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.303113 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.303148 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.303156 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.303170 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.303181 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:17Z","lastTransitionTime":"2025-11-24T01:13:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.307198 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:17Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.318976 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:17Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.342625 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4407e6a61e6a16826e5087eda8f3f2904933c9fb540f0a3c03e5414188248657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bcaff046f33f0df74919d9673b7e12e0609ea03f7686c1806ffb3c351965195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:17Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.354139 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:17Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.367358 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1962128-02a0-46c3-82c2-5055c2aed0b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a249831bc7bdc4396bd5498c176a52332946f791c39b1f90eea4c157e61fc60c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16a2280b76aa63e33f859193da8353df1f8a4014ef51c0ef6350beb5fb217245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h8xzm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:17Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.380885 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8pm69" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19dbf7ff-f684-4c57-803a-83b39e0705a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://090d3d275453f489254405c1197750888cc4d15d4251c6ce9b8a1a873319d5a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-c
ni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6k7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8pm69\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:17Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.393919 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"69352793-3db7-450c-98c2-5fd2040bb5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c1c133fe773aac40181aea15bd49ad2aee2f08caed8f7436c81ae3fc310300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b22922ca0ffb3c9c410a6ed82822ce5b3436d0e44993abe1fcb678959f243808\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5845caccbc89d64a7aaabedeec11a5d75fb7f25eb806cf8dceaf15263f30b4f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a9cb7e9aa093857c03bb4267ff3386e04e4de6e4025fdbfcb628db800d62745\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:17Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.405176 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.405214 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.405222 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.405263 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.405273 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:17Z","lastTransitionTime":"2025-11-24T01:13:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.409413 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27983572-2d9c-43d6-a7f0-445a0aec0531\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets
/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\
\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zb6qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:17Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.427912 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"103d5a7c-0ec9-4c03-9f86-03dc3e01665c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40df6848b15d905bccead2547cc1dec836a69e38e80cb65de5dbee812f0081cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89d05f3773b4861d3bb33d7e6a6f09baaedb7510ed0b15703495110923a2f790\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78a49336f91d7c8aeb0d2861d1228d7b8eb478290bdef3b0662e26f9e1cddd57\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T01:13:12Z\\\",\\\"message\\\":\\\"GCM_SHA384' detected.\\\\nW1124 01:13:12.041119 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 01:13:12.041123 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 01:13:12.046149 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046144 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046262 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046271 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1124 01:13:12.046381 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1124 01:13:12.046396 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1124 01:13:12.046536 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\"\\\\nI1124 01:13:12.046556 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1763946791\\\\\\\\\\\\\\\" (2025-11-24 01:13:10 +0000 UTC to 2025-12-24 01:13:11 +0000 UTC (now=2025-11-24 01:13:12.046510889 +0000 UTC))\\\\\\\"\\\\nF1124 01:13:12.046649 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcfcdeaa4c2ae4a6f4bf2d79a64607361b9798549e8910a7374e4e3d17a16ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:17Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.447342 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://311d898b91dec73c482063aca5a79fefbc55b705d41d4da5fbbc21d8972deac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:17Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.460067 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:17Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.481396 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dt8lz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58ea3cf6-1f18-428e-9b3f-6064671faf72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63e4ef9d52542d28688b962a86087f6b95a40625eefb13ad0460e2e728f5048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fc2wq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dt8lz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:17Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.507485 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.507523 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.507532 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.507549 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.507560 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:17Z","lastTransitionTime":"2025-11-24T01:13:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.510258 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.510303 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:13:17 crc kubenswrapper[4755]: E1124 01:13:17.510427 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 01:13:17 crc kubenswrapper[4755]: E1124 01:13:17.510442 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 24 01:13:17 crc kubenswrapper[4755]: E1124 01:13:17.510454 4755 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 01:13:17 crc kubenswrapper[4755]: E1124 01:13:17.510500 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-24 01:13:19.510487523 +0000 UTC m=+24.196553024 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 01:13:17 crc kubenswrapper[4755]: E1124 01:13:17.510512 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 01:13:17 crc kubenswrapper[4755]: E1124 01:13:17.510578 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 24 01:13:17 crc kubenswrapper[4755]: E1124 01:13:17.510592 4755 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 01:13:17 crc kubenswrapper[4755]: E1124 01:13:17.510667 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-24 01:13:19.510651728 +0000 UTC m=+24.196717229 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.541349 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4ngwk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:17Z 
is after 2025-08-24T17:21:41Z" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.609948 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.609987 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.609998 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.610014 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.610059 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:17Z","lastTransitionTime":"2025-11-24T01:13:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.611197 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.611278 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.611326 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:13:17 crc kubenswrapper[4755]: E1124 01:13:17.611390 4755 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 01:13:17 crc kubenswrapper[4755]: E1124 01:13:17.611424 4755 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 01:13:17 crc kubenswrapper[4755]: E1124 01:13:17.611509 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:13:19.611389093 +0000 UTC m=+24.297454594 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:13:17 crc kubenswrapper[4755]: E1124 01:13:17.611558 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-24 01:13:19.611546038 +0000 UTC m=+24.297611539 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 01:13:17 crc kubenswrapper[4755]: E1124 01:13:17.611580 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-24 01:13:19.611571368 +0000 UTC m=+24.297636869 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.711769 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.712083 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.712098 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.712113 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.712123 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:17Z","lastTransitionTime":"2025-11-24T01:13:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.824156 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.824210 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.824253 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.824269 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.824285 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:17Z","lastTransitionTime":"2025-11-24T01:13:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.926392 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.926427 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.926437 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.926450 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.926461 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:17Z","lastTransitionTime":"2025-11-24T01:13:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.998290 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:13:17 crc kubenswrapper[4755]: E1124 01:13:17.998403 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:13:17 crc kubenswrapper[4755]: I1124 01:13:17.998744 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:13:17 crc kubenswrapper[4755]: E1124 01:13:17.998799 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.000635 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.001319 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.002715 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.003397 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.005000 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.005915 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.006808 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.007582 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.008547 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.009258 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.011494 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.012510 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" 
path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.013855 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.014644 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.016295 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.017102 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.017965 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.019581 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.020867 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.021826 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.023205 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.024079 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.025427 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.026557 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.027233 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.029639 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 
01:13:18.029663 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.029671 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.029685 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.029693 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:18Z","lastTransitionTime":"2025-11-24T01:13:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.030292 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.031330 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.032073 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.032740 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.033659 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.034183 4755 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.034297 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.036773 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.037343 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.037868 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: 
I1124 01:13:18.039535 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.040825 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.041458 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.042561 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.043268 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.044272 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.044954 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.046056 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.047113 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.047690 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.048751 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.049335 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.050735 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.051335 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.051901 4755 kubelet_volumes.go:163] 
"Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.053132 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.053896 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.054961 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.055467 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.130939 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.130968 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.130976 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.130990 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.131000 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:18Z","lastTransitionTime":"2025-11-24T01:13:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.132534 4755 generic.go:334] "Generic (PLEG): container finished" podID="27983572-2d9c-43d6-a7f0-445a0aec0531" containerID="2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9" exitCode=0 Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.132593 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" event={"ID":"27983572-2d9c-43d6-a7f0-445a0aec0531","Type":"ContainerDied","Data":"2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9"} Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.137629 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" event={"ID":"b3b1d3cb-ffbd-4034-832d-6577ccf2f780","Type":"ContainerStarted","Data":"d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662"} Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.137683 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" event={"ID":"b3b1d3cb-ffbd-4034-832d-6577ccf2f780","Type":"ContainerStarted","Data":"dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403"} Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.137693 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" event={"ID":"b3b1d3cb-ffbd-4034-832d-6577ccf2f780","Type":"ContainerStarted","Data":"95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db"} Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.137703 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" event={"ID":"b3b1d3cb-ffbd-4034-832d-6577ccf2f780","Type":"ContainerStarted","Data":"15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b"} Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.137711 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" event={"ID":"b3b1d3cb-ffbd-4034-832d-6577ccf2f780","Type":"ContainerStarted","Data":"c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d"} Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.144826 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4407e6a61e6a16826e5087eda8f3f2904933c9fb540f0a3c03e5414188248657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bcaff046f33f0df74919d9673b7e12e0609ea03f7686c1806ffb3c351965195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:18Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.162891 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:18Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.178989 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1962128-02a0-46c3-82c2-5055c2aed0b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a249831bc7bdc4396bd5498c176a52332946f791c39b1f90eea4c157e61fc60c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16a2280b76aa63e33f859193da8353df1f8a4014ef51c0ef6350beb5fb217245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h8xzm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:18Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.192200 4755 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-8pm69" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19dbf7ff-f684-4c57-803a-83b39e0705a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://090d3d275453f489254405c1197750888cc4d15d4251c6ce9b8a1a873319d5a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6k7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-8pm69\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:18Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.203553 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69352793-3db7-450c-98c2-5fd2040bb5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c1c133fe773aac40181aea15bd49ad2aee2f08caed8f7436c81ae3fc310300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b22922ca0ffb3c9c410a6ed82822ce5b3436d0e44993abe1fcb678959f243808\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5845caccbc89d64a7aaabedeec11a5d75fb7f25eb806cf8dceaf15263f30b4f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"k
ube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a9cb7e9aa093857c03bb4267ff3386e04e4de6e4025fdbfcb628db800d62745\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:18Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.222545 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27983572-2d9c-43d6-a7f0-445a0aec0531\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-
24T01:13:15Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zb6qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:18Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.233972 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.234018 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.234029 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.234047 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.234066 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:18Z","lastTransitionTime":"2025-11-24T01:13:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.236495 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"103d5a7c-0ec9-4c03-9f86-03dc3e01665c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40df6848b15d905bccead2547cc1dec836a69e38e80cb65de5dbee812f0081cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89d05f3773b4861d3bb33d7e6a6f09baaedb7510ed0b15703495110923a2f790\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78a49336f91d7c8aeb0d2861d1228d7b8eb478290bdef3b0662e26f9e1cddd57\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T01:13:12Z\\\",\\\"message\\\":\\\"GCM_SHA384' detected.\\\\nW1124 01:13:12.041119 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 01:13:12.041123 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 01:13:12.046149 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046144 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046262 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046271 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1124 01:13:12.046381 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1124 01:13:12.046396 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1124 01:13:12.046536 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\"\\\\nI1124 01:13:12.046556 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1763946791\\\\\\\\\\\\\\\" (2025-11-24 01:13:10 +0000 UTC to 2025-12-24 01:13:11 +0000 UTC (now=2025-11-24 01:13:12.046510889 +0000 UTC))\\\\\\\"\\\\nF1124 01:13:12.046649 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcfcdeaa4c2ae4a6f4bf2d79a64607361b9798549e8910a7374e4e3d17a16ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:18Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.260322 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://311d898b91dec73c482063aca5a79fefbc55b705d41d4da5fbbc21d8972deac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:18Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.277286 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:18Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.290951 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dt8lz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58ea3cf6-1f18-428e-9b3f-6064671faf72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63e4ef9d52542d28688b962a86087f6b95a40625eefb13ad0460e2e728f5048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fc2wq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dt8lz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:18Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.313150 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af
0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"n
ame\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIP
s\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4ngwk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:18Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.329863 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:18Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.336685 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.336719 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.336730 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.336749 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.336763 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:18Z","lastTransitionTime":"2025-11-24T01:13:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.344299 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:18Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.416434 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.431000 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.434595 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.436164 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:18Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.439826 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.439853 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.439862 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.439876 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.439887 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:18Z","lastTransitionTime":"2025-11-24T01:13:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.447045 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dt8lz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58ea3cf6-1f18-428e-9b3f-6064671faf72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63e4ef9d52542d28688b962a86087f6b95a40625eefb13ad0460e2e728f5048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fc2wq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dt8lz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:18Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.466018 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4ngwk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:18Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.482624 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"103d5a7c-0ec9-4c03-9f86-03dc3e01665c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40df6848b15d905bccead2547cc1dec836a69e38e80cb65de5dbee812f0081cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89d05f3773b4861d3bb33d7e6a6f09baaedb7510ed0b15703495110923a2f790\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78a49336f91d7c8aeb0d2861d1228d7b8eb478290bdef3b0662e26f9e1cddd57\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T01:13:12Z\\\",\\\"message\\\":\\\"GCM_SHA384' detected.\\\\nW1124 01:13:12.041119 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 01:13:12.041123 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 01:13:12.046149 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046144 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046262 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046271 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1124 01:13:12.046381 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1124 01:13:12.046396 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1124 01:13:12.046536 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\"\\\\nI1124 01:13:12.046556 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1763946791\\\\\\\\\\\\\\\" (2025-11-24 01:13:10 +0000 UTC to 2025-12-24 01:13:11 +0000 UTC (now=2025-11-24 01:13:12.046510889 +0000 UTC))\\\\\\\"\\\\nF1124 01:13:12.046649 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcfcdeaa4c2ae4a6f4bf2d79a64607361b9798549e8910a7374e4e3d17a16ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:18Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.495543 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://311d898b91dec73c482063aca5a79fefbc55b705d41d4da5fbbc21d8972deac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:18Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.508816 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:18Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.523943 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:18Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.536937 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4407e6a61e6a16826e5087eda8f3f2904933c9fb540f0a3c03e5414188248657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bcaff046f33f0df74919d9673b7e12e0609ea03f7686c1806ffb3c351965195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:18Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.541599 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.541644 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.541652 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.541669 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.541678 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:18Z","lastTransitionTime":"2025-11-24T01:13:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.549320 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:18Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.562473 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1962128-02a0-46c3-82c2-5055c2aed0b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a249831bc7bdc4396bd5498c176a52332946f791c39b1f90eea4c157e61fc60c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16a2280b76aa63e33f859193da8353df1f8a4014ef51c0ef6350beb5fb217245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h8xzm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:18Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.577777 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8pm69" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19dbf7ff-f684-4c57-803a-83b39e0705a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://090d3d275453f489254405c1197750888cc4d15d4251c6ce9b8a1a873319d5a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-c
ni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6k7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8pm69\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:18Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.593892 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27983572-2d9c-43d6-a7f0-445a0aec0531\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-
24T01:13:15Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zb6qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:18Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.606059 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69352793-3db7-450c-98c2-5fd2040bb5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c1c133fe773aac40181aea15bd49ad2aee2f08caed8f7436c81ae3fc310300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b22922ca0ffb3c9c410a6ed82822ce5b3436d0e44993abe1fcb678959f243808\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5845caccbc89d64a7aaabedeec11a5d75fb7f25eb806cf8dceaf15263f30b4f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf
5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a9cb7e9aa093857c03bb4267ff3386e04e4de6e4025fdbfcb628db800d62745\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:18Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.616262 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1962128-02a0-46c3-82c2-5055c2aed0b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a249831bc7bdc4396bd5498c176a52332946f791c39b1f90eea4c157e61fc60c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16a2280b76aa63e33f859193da8353df1f8a4014ef51c0ef6350beb5fb217245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h8xzm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:18Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.628064 4755 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-8pm69" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19dbf7ff-f684-4c57-803a-83b39e0705a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://090d3d275453f489254405c1197750888cc4d15d4251c6ce9b8a1a873319d5a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6k7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-8pm69\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:18Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.640717 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4407e6a61e6a16826e5087eda8f3f2904933c9fb540f0a3c03e5414188248657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bcaff046f33f0df74919d9673b7e12e0609ea03f7686c1806ffb3c351965195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-11-24T01:13:18Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.644256 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.644293 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.644305 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.644322 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.644333 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:18Z","lastTransitionTime":"2025-11-24T01:13:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.652341 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:18Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.670814 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b438206-d27d-46e5-a2b5-91397c0d8856\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7575abb359776b9b306d821357a1147cadfd41c99d9528e644a3c1fa046d1df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11946902b549b820749fb6127935c82cc1e840cd84834d1803ffc03003c967d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-m
etrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34675c12ebbb549e0db1190d1d99a23ccef6e4a97b884bcdafdc629204238c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25ec710b1a2a3c639ced06a73586f11fbc050671bc059fdcaf49714f153445d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64cdda2f5b59ddeadc36986c875cf28e18a7ed1ce1822dff6002724f7e558215\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":f
alse,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:18Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.700917 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"69352793-3db7-450c-98c2-5fd2040bb5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c1c133fe773aac40181aea15bd49ad2aee2f08caed8f7436c81ae3fc310300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b22922ca0ffb3c9c410a6ed82822ce5b3436d0e44993abe1fcb678959f243808\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5845caccbc89d64a7aaabedeec11a5d75fb7f25eb806cf8dceaf15263f30b4f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a9cb7e9aa093857c03bb4267ff3386e04e4de6e4025fdbfcb628db800d62745\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:18Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.743181 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27983572-2d9c-43d6-a7f0-445a0aec0531\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-
24T01:13:15Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zb6qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:18Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.746368 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.746397 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.746406 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.746418 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.746428 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:18Z","lastTransitionTime":"2025-11-24T01:13:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.785973 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4ngwk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:18Z 
is after 2025-08-24T17:21:41Z" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.830964 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"103d5a7c-0ec9-4c03-9f86-03dc3e01665c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40df6848b15d905bccead2547cc1dec836a69e38e80cb65de5dbee812f0081cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89d05f3773b4861d3bb33d7e6a6f09baaedb7510ed0b15703495110923a2f790\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78a49336f91d7c8aeb0d2861d1228d7b8eb478290bdef3b0662e26f9e1cddd57\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T01:13:12Z\\\",\\\"message\\\":\\\"GCM_SHA384' detected.\\\\nW1124 01:13:12.041119 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 01:13:12.041123 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 01:13:12.046149 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046144 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046262 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046271 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1124 01:13:12.046381 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1124 01:13:12.046396 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1124 01:13:12.046536 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\"\\\\nI1124 01:13:12.046556 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1763946791\\\\\\\\\\\\\\\" (2025-11-24 01:13:10 +0000 UTC to 2025-12-24 01:13:11 +0000 UTC (now=2025-11-24 01:13:12.046510889 +0000 UTC))\\\\\\\"\\\\nF1124 01:13:12.046649 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed 
container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcfcdeaa4c2ae4a6f4bf2d79a64607361b9798549e8910a7374e4e3d17a16ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:18Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.848181 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.848216 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.848228 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.848243 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.848255 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:18Z","lastTransitionTime":"2025-11-24T01:13:18Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.867730 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://311d898b91dec73c482063aca5a79fefbc55b705d41d4da5fbbc21d8972deac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:18Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.901074 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:18Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.939644 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dt8lz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58ea3cf6-1f18-428e-9b3f-6064671faf72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63e4ef9d52542d28688b962a86087f6b95a40625eefb13ad0460e2e728f5048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fc2wq\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dt8lz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:18Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.950248 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.950283 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.950294 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.950311 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.950369 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:18Z","lastTransitionTime":"2025-11-24T01:13:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.981933 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:18Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:18 crc kubenswrapper[4755]: I1124 01:13:18.996528 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:13:18 crc kubenswrapper[4755]: E1124 01:13:18.996672 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.011001 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-vzkz4"] Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.011391 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-vzkz4" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.025562 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:19Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.034237 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.053017 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.053052 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.053062 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.053077 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.053087 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:19Z","lastTransitionTime":"2025-11-24T01:13:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.053480 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.074015 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.093306 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.126147 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/58a85ee7-3417-491b-a375-99f140cfb5de-serviceca\") pod \"node-ca-vzkz4\" (UID: \"58a85ee7-3417-491b-a375-99f140cfb5de\") " pod="openshift-image-registry/node-ca-vzkz4" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.126184 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7tts5\" (UniqueName: \"kubernetes.io/projected/58a85ee7-3417-491b-a375-99f140cfb5de-kube-api-access-7tts5\") pod \"node-ca-vzkz4\" (UID: \"58a85ee7-3417-491b-a375-99f140cfb5de\") " pod="openshift-image-registry/node-ca-vzkz4" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.126214 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/58a85ee7-3417-491b-a375-99f140cfb5de-host\") pod \"node-ca-vzkz4\" (UID: \"58a85ee7-3417-491b-a375-99f140cfb5de\") " pod="openshift-image-registry/node-ca-vzkz4" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.142664 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"519ff5cd20c605ee21f4c1651c932618d593cb5ecf336f3504ebe1bd0f35adbb"} Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.145128 4755 generic.go:334] "Generic (PLEG): container finished" podID="27983572-2d9c-43d6-a7f0-445a0aec0531" containerID="c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e" exitCode=0 Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.145206 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" event={"ID":"27983572-2d9c-43d6-a7f0-445a0aec0531","Type":"ContainerDied","Data":"c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e"} Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.149442 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b438206-d27d-46e5-a2b5-91397c0d8856\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7575abb359776b9b306d821357a1147cadfd41c99d9528e644a3c1fa046d1df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11946902b549b820749fb6127935c82cc1e840cd84834d1803ffc03003c967d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34675c12ebbb549e0db1190d1d99a23ccef6e4a97b884bcdafdc629204238c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25ec710b1a2a3c639ced06a73586f11fbc05067
1bc059fdcaf49714f153445d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64cdda2f5b59ddeadc36986c875cf28e18a7ed1ce1822dff6002724f7e558215\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:19Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.149658 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" event={"ID":"b3b1d3cb-ffbd-4034-832d-6577ccf2f780","Type":"ContainerStarted","Data":"5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad"} Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.155196 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.155232 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.155243 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.155259 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.155272 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:19Z","lastTransitionTime":"2025-11-24T01:13:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.183149 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69352793-3db7-450c-98c2-5fd2040bb5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c1c133fe773aac40181aea15bd49ad2aee2f08caed8f7436c81ae3fc310300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b22922ca0ffb3c9c410a6ed82822ce5b3436d0e44993abe1fcb678959f243808\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5845caccbc89d64a7aaabedeec11a5d75fb7f25eb806cf8dceaf15263f30b4f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a9cb7e9aa093857c03bb4267ff3386e04e4de6e4025fdbfcb628db800d62745\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:19Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.225492 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27983572-2d9c-43d6-a7f0-445a0aec0531\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-
24T01:13:15Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zb6qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:19Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.226860 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/58a85ee7-3417-491b-a375-99f140cfb5de-serviceca\") pod \"node-ca-vzkz4\" (UID: \"58a85ee7-3417-491b-a375-99f140cfb5de\") " pod="openshift-image-registry/node-ca-vzkz4" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.226912 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7tts5\" (UniqueName: \"kubernetes.io/projected/58a85ee7-3417-491b-a375-99f140cfb5de-kube-api-access-7tts5\") pod \"node-ca-vzkz4\" (UID: \"58a85ee7-3417-491b-a375-99f140cfb5de\") " pod="openshift-image-registry/node-ca-vzkz4" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.226975 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/58a85ee7-3417-491b-a375-99f140cfb5de-host\") pod \"node-ca-vzkz4\" (UID: \"58a85ee7-3417-491b-a375-99f140cfb5de\") " pod="openshift-image-registry/node-ca-vzkz4" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.227712 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/58a85ee7-3417-491b-a375-99f140cfb5de-host\") pod \"node-ca-vzkz4\" (UID: \"58a85ee7-3417-491b-a375-99f140cfb5de\") " pod="openshift-image-registry/node-ca-vzkz4" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.228067 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/58a85ee7-3417-491b-a375-99f140cfb5de-serviceca\") pod \"node-ca-vzkz4\" (UID: \"58a85ee7-3417-491b-a375-99f140cfb5de\") " pod="openshift-image-registry/node-ca-vzkz4" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.257494 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.257534 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.257543 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.257558 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.257568 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:19Z","lastTransitionTime":"2025-11-24T01:13:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.269690 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7tts5\" (UniqueName: \"kubernetes.io/projected/58a85ee7-3417-491b-a375-99f140cfb5de-kube-api-access-7tts5\") pod \"node-ca-vzkz4\" (UID: \"58a85ee7-3417-491b-a375-99f140cfb5de\") " pod="openshift-image-registry/node-ca-vzkz4" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.284451 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"103d5a7c-0ec9-4c03-9f86-03dc3e01665c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40df6848b15d905bccead2547cc1dec836a69e38e80cb65de5dbee812f0081cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89d05f3773b4861d3bb33d7e6a6f09baaedb7510ed0b15703495110923a2f790\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78a49336f91d7c8aeb0d2861d1228d7b8eb478290bdef3b0662e26f9e1cddd57\\\",\\\"image\\\":\\\"quay.io/crcont
/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T01:13:12Z\\\",\\\"message\\\":\\\"GCM_SHA384' detected.\\\\nW1124 01:13:12.041119 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 01:13:12.041123 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 01:13:12.046149 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046144 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046262 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046271 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1124 01:13:12.046381 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1124 01:13:12.046396 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1124 01:13:12.046536 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\"\\\\nI1124 01:13:12.046556 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1763946791\\\\\\\\\\\\\\\" (2025-11-24 01:13:10 +0000 UTC to 2025-12-24 01:13:11 +0000 UTC (now=2025-11-24 01:13:12.046510889 +0000 UTC))\\\\\\\"\\\\nF1124 
01:13:12.046649 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcfcdeaa4c2ae4a6f4bf2d79a64607361b9798549e8910a7374e4e3d17a16ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:19Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.322021 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-vzkz4" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.322957 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://311d898b91dec73c482063aca5a79fefbc55b705d41d4da5fbbc21d8972deac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:19Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:19 crc kubenswrapper[4755]: W1124 01:13:19.332428 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod58a85ee7_3417_491b_a375_99f140cfb5de.slice/crio-1544dce89e9b69c2872b9cc1917c1859621b9c0a9d3b42c85b5696b5330729d4 WatchSource:0}: Error finding container 1544dce89e9b69c2872b9cc1917c1859621b9c0a9d3b42c85b5696b5330729d4: Status 404 returned error can't find the container with id 1544dce89e9b69c2872b9cc1917c1859621b9c0a9d3b42c85b5696b5330729d4 Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.363098 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:19Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.363183 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.363454 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.363463 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.363478 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.363489 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:19Z","lastTransitionTime":"2025-11-24T01:13:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.399597 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dt8lz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58ea3cf6-1f18-428e-9b3f-6064671faf72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63e4ef9d52542d28688b962a86087f6b95a40625eefb13ad0460e2e728f5048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fc2wq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dt8lz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:19Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.445721 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4ngwk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:19Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.465695 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.465725 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.465733 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.465748 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.465757 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:19Z","lastTransitionTime":"2025-11-24T01:13:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.485628 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:19Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.523040 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:19Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.529581 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.529645 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:13:19 crc kubenswrapper[4755]: E1124 01:13:19.529773 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 01:13:19 crc kubenswrapper[4755]: E1124 01:13:19.529787 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 24 01:13:19 crc kubenswrapper[4755]: E1124 01:13:19.529798 4755 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 01:13:19 crc kubenswrapper[4755]: E1124 01:13:19.529831 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-24 01:13:23.529819802 +0000 UTC m=+28.215885303 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 01:13:19 crc kubenswrapper[4755]: E1124 01:13:19.530071 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 01:13:19 crc kubenswrapper[4755]: E1124 01:13:19.530081 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 24 01:13:19 crc kubenswrapper[4755]: E1124 01:13:19.530089 4755 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 01:13:19 crc kubenswrapper[4755]: E1124 01:13:19.530110 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-24 01:13:23.530103501 +0000 UTC m=+28.216169002 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.563697 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8pm69" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"19dbf7ff-f684-4c57-803a-83b39e0705a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://090d3d275453f489254405c1197750888cc4d15d4251c6ce9b8a1a873319d5a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6k7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8pm69\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:19Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.568402 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.568435 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.568445 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.568462 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.568477 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:19Z","lastTransitionTime":"2025-11-24T01:13:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.602509 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vzkz4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a85ee7-3417-491b-a375-99f140cfb5de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tts5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vzkz4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:19Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.631195 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.631284 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.631346 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:13:19 crc kubenswrapper[4755]: E1124 01:13:19.631411 4755 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 01:13:19 crc kubenswrapper[4755]: E1124 01:13:19.631424 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:13:23.631395232 +0000 UTC m=+28.317460733 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:13:19 crc kubenswrapper[4755]: E1124 01:13:19.631462 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-24 01:13:23.631447033 +0000 UTC m=+28.317512644 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 01:13:19 crc kubenswrapper[4755]: E1124 01:13:19.631581 4755 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 01:13:19 crc kubenswrapper[4755]: E1124 01:13:19.631693 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-24 01:13:23.63167727 +0000 UTC m=+28.317742771 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.646316 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4407e6a61e6a16826e5087eda8f3f2904933c9fb540f0a3c03e5414188248657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bcaff046f33f0df74919d9673b7e12e0609ea03f7686c1806ffb3c351965195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:19Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.671347 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.671386 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.671402 4755 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.671423 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.671438 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:19Z","lastTransitionTime":"2025-11-24T01:13:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.683263 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:19Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.720246 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1962128-02a0-46c3-82c2-5055c2aed0b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a249831bc7bdc4396bd5498c176a52332946f791c39b1f90eea4c157e61fc60c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16a2280b76aa63e33f859193da8353df1f8a4014ef51c0ef6350beb5fb217245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h8xzm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:19Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.763430 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:19Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.774393 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.774426 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.774436 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.774450 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.774460 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:19Z","lastTransitionTime":"2025-11-24T01:13:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.801199 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:19Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.848584 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8pm69" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"19dbf7ff-f684-4c57-803a-83b39e0705a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://090d3d275453f489254405c1197750888cc4d15d4251c6ce9b8a1a873319d5a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6k7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8pm69\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:19Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.878029 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.878084 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.878098 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.878123 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.878138 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:19Z","lastTransitionTime":"2025-11-24T01:13:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.881977 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vzkz4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a85ee7-3417-491b-a375-99f140cfb5de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tts5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vzkz4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:19Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.923985 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4407e6a61e6a16826e5087eda8f3f2904933c9fb540f0a3c03e5414188248657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bcaff046f33f0df74919d9673b7e12e0609ea03f7686c1806ffb3c351965195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc
32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:19Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.965728 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://519ff5cd20c605ee21f4c1651c932618d593cb5ecf336f3504ebe1bd0f35adbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:19Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 
01:13:19.981420 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.981463 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.981475 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.981492 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.981505 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:19Z","lastTransitionTime":"2025-11-24T01:13:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.996188 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:13:19 crc kubenswrapper[4755]: I1124 01:13:19.996246 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:13:19 crc kubenswrapper[4755]: E1124 01:13:19.996326 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:13:19 crc kubenswrapper[4755]: E1124 01:13:19.996496 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.004258 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1962128-02a0-46c3-82c2-5055c2aed0b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a249831bc7bdc4396bd5498c176a52332946f791c39b1f90eea4c157e61fc60c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16a2280b76aa63e33f859193da8353df1f8a4014ef51c0ef6350beb5fb217245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h8xzm\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:20Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.057799 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b438206-d27d-46e5-a2b5-91397c0d8856\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7575abb359776b9b306d821357a1147cadfd41c99d9528e644a3c1fa046d1df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11946902b549b820749fb6127935c82cc1e840cd84834d1803ffc03003c967d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34675c12ebbb549e0db1190d1d99a23ccef6e4a97b884bcdafdc629204238c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25ec710b1a2a3c639ced06a73586f11fbc050671bc059fdcaf49714f153445d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64cdda2f5b59ddeadc36986c875cf28e18a7ed1ce1822dff6002724f7e558215\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:20Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.084778 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.084856 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.084872 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.084895 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.084909 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:20Z","lastTransitionTime":"2025-11-24T01:13:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.086313 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69352793-3db7-450c-98c2-5fd2040bb5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c1c133fe773aac40181aea15bd49ad2aee2f08caed8f7436c81ae3fc310300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b22922ca0ffb3c9c410a6ed82822ce5b3436d0e44993abe1fcb678959f243808\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5845caccbc89d64a7aaabedeec11a5d75fb7f25eb806cf8dceaf15263f30b4f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a9cb7e9aa093857c03bb4267ff3386e04e4de6e4025fdbfcb628db800d62745\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:20Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.126509 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27983572-2d9c-43d6-a7f0-445a0aec0531\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zb6qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:20Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.156827 4755 generic.go:334] "Generic (PLEG): container finished" podID="27983572-2d9c-43d6-a7f0-445a0aec0531" containerID="cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b" exitCode=0 Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.156922 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" event={"ID":"27983572-2d9c-43d6-a7f0-445a0aec0531","Type":"ContainerDied","Data":"cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b"} Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.160478 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-vzkz4" event={"ID":"58a85ee7-3417-491b-a375-99f140cfb5de","Type":"ContainerStarted","Data":"67684d7d9617d917019f391c4031994da8f4ff110a4fe3d77dfa1bf76378dec5"} Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.160528 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-vzkz4" event={"ID":"58a85ee7-3417-491b-a375-99f140cfb5de","Type":"ContainerStarted","Data":"1544dce89e9b69c2872b9cc1917c1859621b9c0a9d3b42c85b5696b5330729d4"} Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.165308 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"103d5a7c-0ec9-4c03-9f86-03dc3e01665c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40df6848b15d905bccead2547cc1dec836a69e38e80cb65de5dbee812f0081cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89d05f3773b4861d3bb33d7e6a6f09baaedb7510ed0b15703495110923a2f790\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78a49336f91d7c8aeb0d2861d1228d7b8eb478290bdef3b0662e26f9e1cddd57\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T01:13:12Z\\\",\\\"message\\\":\\\"GCM_SHA384' detected.\\\\nW1124 01:13:12.041119 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 01:13:12.041123 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 01:13:12.046149 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046144 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046262 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046271 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1124 01:13:12.046381 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1124 01:13:12.046396 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1124 01:13:12.046536 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\"\\\\nI1124 01:13:12.046556 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1763946791\\\\\\\\\\\\\\\" (2025-11-24 01:13:10 +0000 UTC to 2025-12-24 01:13:11 +0000 UTC (now=2025-11-24 01:13:12.046510889 +0000 UTC))\\\\\\\"\\\\nF1124 01:13:12.046649 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcfcdeaa4c2ae4a6f4bf2d79a64607361b9798549e8910a7374e4e3d17a16ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:20Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.188730 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.188778 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.188794 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.188814 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.188830 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:20Z","lastTransitionTime":"2025-11-24T01:13:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.206152 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://311d898b91dec73c482063aca5a79fefbc55b705d41d4da5fbbc21d8972deac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:20Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.242676 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:20Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.280000 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dt8lz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58ea3cf6-1f18-428e-9b3f-6064671faf72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63e4ef9d52542d28688b962a86087f6b95a40625eefb13ad0460e2e728f5048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fc2wq\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dt8lz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:20Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.291232 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.291267 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.291279 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.291294 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.291306 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:20Z","lastTransitionTime":"2025-11-24T01:13:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.326228 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c95
6a3ef90097631d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4ngwk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:20Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.366870 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27983572-2d9c-43d6-a7f0-445a0aec0531\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zb6qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:20Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.394222 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.394261 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.394271 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.394286 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.394297 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:20Z","lastTransitionTime":"2025-11-24T01:13:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.407825 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b438206-d27d-46e5-a2b5-91397c0d8856\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7575abb359776b9b306d821357a1147cadfd41c99d9528e644a3c1fa046d1df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11946902b549b820749fb6127935c82cc1e840cd84834d1803ffc03003c967d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34675c12ebbb549e0db1190d1d99a23ccef6e4a97b884bcdafdc629204238c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25ec710b1a2a3c639ced06a73586f11fbc050671bc059fdcaf49714f153445d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64cdda2f5b59ddeadc36986c875cf28e18a7ed1ce1822dff6002724f7e558215\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:20Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.441636 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"69352793-3db7-450c-98c2-5fd2040bb5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c1c133fe773aac40181aea15bd49ad2aee2f08caed8f7436c81ae3fc310300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b22922ca0ffb3c9c410a6ed82822ce5b3436d0e44993abe1fcb678959f243808\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5845caccbc89d64a7aaabedeec11a5d75fb7f25eb806cf8dceaf15263f30b4f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a9cb7e9aa093857c03bb4267ff3386e04e4de6e4025fdbfcb628db800d62745\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:20Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.480116 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:20Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.496312 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.496347 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.496358 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.496377 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.496388 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:20Z","lastTransitionTime":"2025-11-24T01:13:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.522136 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dt8lz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58ea3cf6-1f18-428e-9b3f-6064671faf72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63e4ef9d52542d28688b962a86087f6b95a40625eefb13ad0460e2e728f5048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fc2wq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dt8lz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:20Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.531177 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.532154 4755 scope.go:117] "RemoveContainer" containerID="464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a" Nov 24 01:13:20 crc kubenswrapper[4755]: E1124 01:13:20.532338 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Nov 24 01:13:20 crc 
kubenswrapper[4755]: I1124 01:13:20.566890 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"Po
dInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\
\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"image\\\":\\\"quay.io/openshift-re
lease-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4ngwk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:20Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.598910 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.598954 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.598969 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.598989 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.599001 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:20Z","lastTransitionTime":"2025-11-24T01:13:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.604111 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"103d5a7c-0ec9-4c03-9f86-03dc3e01665c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40df6848b15d905bccead2547cc1dec836a69e38e80cb65de5dbee812f0081cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89d05f3773b4861d3bb33d7e6a6f09baaedb7510ed0b15703495110923a2f790\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78a49336f91d7c8aeb0d2861d1228d7b8eb478290bdef3b0662e26f9e1cddd57\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T01:13:12Z\\\",\\\"message\\\":\\\"GCM_SHA384' detected.\\\\nW1124 01:13:12.041119 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 01:13:12.041123 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 01:13:12.046149 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046144 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046262 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046271 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1124 01:13:12.046381 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1124 01:13:12.046396 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1124 01:13:12.046536 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\"\\\\nI1124 01:13:12.046556 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1763946791\\\\\\\\\\\\\\\" (2025-11-24 01:13:10 +0000 UTC to 2025-12-24 01:13:11 +0000 UTC (now=2025-11-24 01:13:12.046510889 +0000 UTC))\\\\\\\"\\\\nF1124 01:13:12.046649 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed 
container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcfcdeaa4c2ae4a6f4bf2d79a64607361b9798549e8910a7374e4e3d17a16ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:20Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.642862 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://311d898b91dec73c482063aca5a79fefbc55b705d41d4da5fbbc21d8972deac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:20Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.683468 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:20Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.701457 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.701485 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.701494 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.701508 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.701516 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:20Z","lastTransitionTime":"2025-11-24T01:13:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.723487 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:20Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.762403 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4407e6a61e6a16826e5087eda8f3f2904933c9fb540f0a3c03e5414188248657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bcaff046f33f0df74919d9673b7e12e0609ea03f7686c1806ffb3c351965195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:20Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.804575 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.804639 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.804650 4755 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.804667 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.804681 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:20Z","lastTransitionTime":"2025-11-24T01:13:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.805574 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://519ff5cd20c605ee21f4c1651c932618d593cb5ecf336f3504ebe1bd0f35adbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:20Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.844519 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1962128-02a0-46c3-82c2-5055c2aed0b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a249831bc7bdc4396bd5498c176a52332946f791c39b1f90eea4c157e61fc60c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16a2280b76aa63e33f859193da8353df1f8a4014ef51c0ef6350beb5fb217245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h8xzm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:20Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.890233 4755 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-8pm69" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19dbf7ff-f684-4c57-803a-83b39e0705a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://090d3d275453f489254405c1197750888cc4d15d4251c6ce9b8a1a873319d5a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6k7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-8pm69\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:20Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.906838 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.906878 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.906891 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.906910 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.906922 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:20Z","lastTransitionTime":"2025-11-24T01:13:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.920247 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vzkz4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a85ee7-3417-491b-a375-99f140cfb5de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67684d7d9617d917019f391c4031994da8f4ff110a4fe3d77dfa1bf76378dec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acce
ss-7tts5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vzkz4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:20Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:20 crc kubenswrapper[4755]: I1124 01:13:20.996164 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:13:20 crc kubenswrapper[4755]: E1124 01:13:20.996772 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.009085 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.009132 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.009142 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.009158 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.009169 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:21Z","lastTransitionTime":"2025-11-24T01:13:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.111862 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.111897 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.111906 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.111921 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.111931 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:21Z","lastTransitionTime":"2025-11-24T01:13:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.167967 4755 generic.go:334] "Generic (PLEG): container finished" podID="27983572-2d9c-43d6-a7f0-445a0aec0531" containerID="372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9" exitCode=0 Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.168035 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" event={"ID":"27983572-2d9c-43d6-a7f0-445a0aec0531","Type":"ContainerDied","Data":"372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9"} Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.176487 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" event={"ID":"b3b1d3cb-ffbd-4034-832d-6577ccf2f780","Type":"ContainerStarted","Data":"43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766"} Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.185396 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the 
pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:21Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.203448 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:21Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.214082 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.214150 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.214174 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.214205 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.214227 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:21Z","lastTransitionTime":"2025-11-24T01:13:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.216689 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4407e6a61e6a16826e5087eda8f3f2904933c9fb540f0a3c03e5414188248657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bcaff046f33f0df74919d9673b7e12e0609ea03f7686c1806ffb3c351965195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:21Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.228162 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://519ff5cd20c605ee21f4c1651c932618d593cb5ecf336f3504ebe1bd0f35adbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:21Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.239304 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1962128-02a0-46c3-82c2-5055c2aed0b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a249831bc7bdc4396bd5498c176a52332946f791c39b1f90eea4c157e61fc60c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16a2280b76aa63e33f859193da8353df1f8a4014ef51c0ef6350beb5fb217245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h8xzm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:21Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.251692 4755 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-8pm69" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19dbf7ff-f684-4c57-803a-83b39e0705a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://090d3d275453f489254405c1197750888cc4d15d4251c6ce9b8a1a873319d5a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6k7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-8pm69\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:21Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.263142 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vzkz4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a85ee7-3417-491b-a375-99f140cfb5de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67684d7d9617d917019f391c4031994da8f4ff110a4fe3d77dfa1bf76378dec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tts5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vzkz4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:21Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.294125 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b438206-d27d-46e5-a2b5-91397c0d8856\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7575abb359776b9b306d821357a1147cadfd41c99d9528e644a3c1fa046d1df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11946902b549b820749fb6127935c82cc1e840cd84834d1803ffc03003c967d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34675c12ebbb549e0db1190d1d99a23ccef6e4a97b884bcdafdc629204238c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25ec710b1a2a3c639ced06a73586f11fbc05067
1bc059fdcaf49714f153445d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64cdda2f5b59ddeadc36986c875cf28e18a7ed1ce1822dff6002724f7e558215\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:21Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.305016 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69352793-3db7-450c-98c2-5fd2040bb5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c1c133fe773aac40181aea15bd49ad2aee2f08caed8f7436c81ae3fc310300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b22922ca0ffb3c9c410a6ed82822ce5b3436d0e44993abe1fcb678959f243808\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5845caccbc89d64a7aaabedeec11a5d75fb7f25eb806cf8dceaf15263f30b4f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a9cb7e9aa093857c03bb4267ff3386e04e4de6e4025fdbfcb628db800d62745\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:21Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.319510 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.319567 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.319584 4755 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.319646 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.319664 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:21Z","lastTransitionTime":"2025-11-24T01:13:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.322844 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27983572-2d9c-43d6-a7f0-445a0aec0531\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zb6qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:21Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.363150 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"103d5a7c-0ec9-4c03-9f86-03dc3e01665c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40df6848b15d905bccead2547cc1dec836a69e38e80cb65de5dbee812f0081cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89d05f3773b4861d3bb33d7e6a6f09baaedb7510ed0b15703495110923a2f790\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78a49336f91d7c8aeb0d2861d1228d7b8eb478290bdef3b0662e26f9e1cddd57\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T01:13:12Z\\\",\\\"message\\\":\\\"GCM_SHA384' detected.\\\\nW1124 01:13:12.041119 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 01:13:12.041123 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 01:13:12.046149 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046144 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046262 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046271 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1124 01:13:12.046381 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1124 01:13:12.046396 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1124 01:13:12.046536 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\"\\\\nI1124 01:13:12.046556 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1763946791\\\\\\\\\\\\\\\" (2025-11-24 01:13:10 +0000 UTC to 2025-12-24 01:13:11 +0000 UTC (now=2025-11-24 01:13:12.046510889 +0000 UTC))\\\\\\\"\\\\nF1124 01:13:12.046649 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcfcdeaa4c2ae4a6f4bf2d79a64607361b9798549e8910a7374e4e3d17a16ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:21Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.404258 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://311d898b91dec73c482063aca5a79fefbc55b705d41d4da5fbbc21d8972deac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:21Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.422570 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.422619 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.422628 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.422644 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.422656 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:21Z","lastTransitionTime":"2025-11-24T01:13:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.446699 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:21Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.482401 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dt8lz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58ea3cf6-1f18-428e-9b3f-6064671faf72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63e4ef9d52542d28688b962a86087f6b95a40625eefb13ad0460e2e728f5048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fc2wq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dt8lz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:21Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.525746 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.525813 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.525836 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.525868 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.525897 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:21Z","lastTransitionTime":"2025-11-24T01:13:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.530365 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"
ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"c
ri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4ngwk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:21Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.629085 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.629155 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.629167 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.629188 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.629205 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:21Z","lastTransitionTime":"2025-11-24T01:13:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.730998 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.731040 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.731054 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.731073 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.731087 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:21Z","lastTransitionTime":"2025-11-24T01:13:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.833658 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.833692 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.833700 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.833714 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.833723 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:21Z","lastTransitionTime":"2025-11-24T01:13:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.935732 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.935769 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.935778 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.935792 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.935803 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:21Z","lastTransitionTime":"2025-11-24T01:13:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.996220 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:13:21 crc kubenswrapper[4755]: I1124 01:13:21.996248 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:13:21 crc kubenswrapper[4755]: E1124 01:13:21.996421 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:13:21 crc kubenswrapper[4755]: E1124 01:13:21.996566 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.038564 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.038661 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.038687 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.038717 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.038735 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:22Z","lastTransitionTime":"2025-11-24T01:13:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.141727 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.141781 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.141793 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.141814 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.141833 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:22Z","lastTransitionTime":"2025-11-24T01:13:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.185556 4755 generic.go:334] "Generic (PLEG): container finished" podID="27983572-2d9c-43d6-a7f0-445a0aec0531" containerID="56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac" exitCode=0 Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.185655 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" event={"ID":"27983572-2d9c-43d6-a7f0-445a0aec0531","Type":"ContainerDied","Data":"56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac"} Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.206777 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:22Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.224802 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:22Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.235383 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1962128-02a0-46c3-82c2-5055c2aed0b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a249831bc7bdc4396bd5498c176a52332946f791c39b1f90eea4c157e61fc60c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16a2280b76aa63e33f859193da8353df1f8a4014ef51c0ef6350beb5fb217245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h8xzm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:22Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.244954 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.244993 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.245004 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.245019 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.245028 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:22Z","lastTransitionTime":"2025-11-24T01:13:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.247915 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8pm69" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19dbf7ff-f684-4c57-803a-83b39e0705a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://090d3d275453f489254405c1197750888cc4d15d4251c6ce9b8a1a873319d5a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6k7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8pm69\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:22Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.256832 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vzkz4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a85ee7-3417-491b-a375-99f140cfb5de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67684d7d9617d917019f391c4031994da8f4ff110a4fe3d77dfa1bf76378dec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tts5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vzkz4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:22Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.269985 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4407e6a61e6a16826e5087eda8f3f2904933c9fb540f0a3c03e5414188248657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bcaff046f33f0df74919d9673b7e12e0609ea03f7686c1806ffb3c351965195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:22Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.281236 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://519ff5cd20c605ee21f4c1651c932618d593cb5ecf336f3504ebe1bd0f35adbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:22Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.303451 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b438206-d27d-46e5-a2b5-91397c0d8856\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7575abb359776b9b306d821357a1147cadfd41c99d9528e644a3c1fa046d1df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11946902b549b820749fb6127935c82cc1e840cd84834d1803ffc03003c967d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34675c12ebbb549e0db1190d1d99a23ccef6e4a97b884bcdafdc629204238c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25ec710b1a2a3c639ced06a73586f11fbc05067
1bc059fdcaf49714f153445d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64cdda2f5b59ddeadc36986c875cf28e18a7ed1ce1822dff6002724f7e558215\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:22Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.316366 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69352793-3db7-450c-98c2-5fd2040bb5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c1c133fe773aac40181aea15bd49ad2aee2f08caed8f7436c81ae3fc310300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b22922ca0ffb3c9c410a6ed82822ce5b3436d0e44993abe1fcb678959f243808\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5845caccbc89d64a7aaabedeec11a5d75fb7f25eb806cf8dceaf15263f30b4f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a9cb7e9aa093857c03bb4267ff3386e04e4de6e4025fdbfcb628db800d62745\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:22Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.331886 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27983572-2d9c-43d6-a7f0-445a0aec0531\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zb6qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:22Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.347487 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.347524 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:22 crc 
kubenswrapper[4755]: I1124 01:13:22.347535 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.347550 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.347563 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:22Z","lastTransitionTime":"2025-11-24T01:13:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.352401 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4ngwk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:22Z 
is after 2025-08-24T17:21:41Z" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.364924 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"103d5a7c-0ec9-4c03-9f86-03dc3e01665c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40df6848b15d905bccead2547cc1dec836a69e38e80cb65de5dbee812f0081cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89d05f3773b4861d3bb33d7e6a6f09baaedb7510ed0b15703495110923a2f790\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78a49336f91d7c8aeb0d2861d1228d7b8eb478290bdef3b0662e26f9e1cddd57\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T01:13:12Z\\\",\\\"message\\\":\\\"GCM_SHA384' detected.\\\\nW1124 01:13:12.041119 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 01:13:12.041123 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 01:13:12.046149 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046144 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046262 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046271 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1124 01:13:12.046381 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1124 01:13:12.046396 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1124 01:13:12.046536 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\"\\\\nI1124 01:13:12.046556 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1763946791\\\\\\\\\\\\\\\" (2025-11-24 01:13:10 +0000 UTC to 2025-12-24 01:13:11 +0000 UTC (now=2025-11-24 01:13:12.046510889 +0000 UTC))\\\\\\\"\\\\nF1124 01:13:12.046649 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed 
container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcfcdeaa4c2ae4a6f4bf2d79a64607361b9798549e8910a7374e4e3d17a16ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:22Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.377172 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://311d898b91dec73c482063aca5a79fefbc55b705d41d4da5fbbc21d8972deac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:22Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.387701 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:22Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.396103 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dt8lz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58ea3cf6-1f18-428e-9b3f-6064671faf72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63e4ef9d52542d28688b962a86087f6b95a40625eefb13ad0460e2e728f5048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fc2wq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dt8lz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:22Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.450244 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.450277 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.450290 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.450302 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.450311 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:22Z","lastTransitionTime":"2025-11-24T01:13:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.552916 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.552948 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.552955 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.552969 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.552978 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:22Z","lastTransitionTime":"2025-11-24T01:13:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.655513 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.655540 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.655549 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.655567 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.655586 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:22Z","lastTransitionTime":"2025-11-24T01:13:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.758021 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.758056 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.758067 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.758081 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.758091 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:22Z","lastTransitionTime":"2025-11-24T01:13:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.860849 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.860891 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.860903 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.860930 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.860940 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:22Z","lastTransitionTime":"2025-11-24T01:13:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.963431 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.963468 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.963478 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.963498 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.963507 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:22Z","lastTransitionTime":"2025-11-24T01:13:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:22 crc kubenswrapper[4755]: I1124 01:13:22.995981 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:13:22 crc kubenswrapper[4755]: E1124 01:13:22.996121 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.065577 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.065961 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.065981 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.066005 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.066021 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:23Z","lastTransitionTime":"2025-11-24T01:13:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.168882 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.168923 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.168932 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.168948 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.168959 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:23Z","lastTransitionTime":"2025-11-24T01:13:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.193954 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" event={"ID":"b3b1d3cb-ffbd-4034-832d-6577ccf2f780","Type":"ContainerStarted","Data":"49da0175a68fbe6e5485a54e8b21dbfbef01484351410d51eabbb5dbbc8a682b"} Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.194197 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.198464 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" event={"ID":"27983572-2d9c-43d6-a7f0-445a0aec0531","Type":"ContainerStarted","Data":"612fe8e80230000f941c6c568234f32233a32e831a6d75556d9af68a1b790a7a"} Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.209666 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27983572-2d9c-43d6-a7f0-445a0aec0531\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zb6qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:23Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.220587 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.226894 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b438206-d27d-46e5-a2b5-91397c0d8856\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7575abb359776b9b306d821357a1147cadfd41c99d9528e644a3c1fa046d1df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11946902b549b820749fb6127935c82cc1e840cd84834d1803ffc03003c967d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34675c12ebbb549e0db1190d1d99a23ccef6e4a97b884bcdafdc629204238c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25ec710b1a2a3c639ced06a73586f11fbc05067
1bc059fdcaf49714f153445d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64cdda2f5b59ddeadc36986c875cf28e18a7ed1ce1822dff6002724f7e558215\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:23Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.240141 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69352793-3db7-450c-98c2-5fd2040bb5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c1c133fe773aac40181aea15bd49ad2aee2f08caed8f7436c81ae3fc310300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b22922ca0ffb3c9c410a6ed82822ce5b3436d0e44993abe1fcb678959f243808\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5845caccbc89d64a7aaabedeec11a5d75fb7f25eb806cf8dceaf15263f30b4f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a9cb7e9aa093857c03bb4267ff3386e04e4de6e4025fdbfcb628db800d62745\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:23Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.251023 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:23Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.263210 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dt8lz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58ea3cf6-1f18-428e-9b3f-6064671faf72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63e4ef9d52542d28688b962a86087f6b95a40625eefb13ad0460e2e728f5048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fc2wq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dt8lz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:23Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.270839 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.270865 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.270873 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.270886 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.270895 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:23Z","lastTransitionTime":"2025-11-24T01:13:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.282906 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOn
ly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d
2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49da0175a68fbe6e5485a54e8b21dbfbef01484351410d51eabbb5dbbc8a682b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\
"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4ngwk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:23Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.296339 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"103d5a7c-0ec9-4c03-9f86-03dc3e01665c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40df6848b15d905bccead2547cc1dec836a69e38e80cb65de5dbee812f0081cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89d05f3773b4861d3bb33d7e6a6f09baaedb7510ed0b15703495110923a2f790\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78a49336f91d7c8aeb0d2861d1228d7b8eb478290bdef3b0662e26f9e1cddd57\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T01:13:12Z\\\",\\\"message\\\":\\\"GCM_SHA384' detected.\\\\nW1124 01:13:12.041119 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 01:13:12.041123 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 01:13:12.046149 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046144 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046262 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046271 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1124 01:13:12.046381 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1124 01:13:12.046396 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1124 01:13:12.046536 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\"\\\\nI1124 01:13:12.046556 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1763946791\\\\\\\\\\\\\\\" (2025-11-24 01:13:10 +0000 UTC to 2025-12-24 01:13:11 +0000 UTC (now=2025-11-24 01:13:12.046510889 +0000 UTC))\\\\\\\"\\\\nF1124 01:13:12.046649 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcfcdeaa4c2ae4a6f4bf2d79a64607361b9798549e8910a7374e4e3d17a16ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:23Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.313103 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://311d898b91dec73c482063aca5a79fefbc55b705d41d4da5fbbc21d8972deac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:23Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.326903 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:23Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.339858 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:23Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.353058 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4407e6a61e6a16826e5087eda8f3f2904933c9fb540f0a3c03e5414188248657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bcaff046f33f0df74919d9673b7e12e0609ea03f7686c1806ffb3c351965195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:23Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.363055 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://519ff5cd20c605ee21f4c1651c932618d593cb5ecf336f3504ebe1bd0f35adbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:23Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.373109 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1962128-02a0-46c3-82c2-5055c2aed0b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a249831bc7bdc4396bd5498c176a52332946f791c39b1f90eea4c157e61fc60c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16a2280b76aa63e33f859193da8353df1f8a4014ef51c0ef6350beb5fb217245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h8xzm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:23Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.373219 4755 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.373252 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.373263 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.373279 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.373288 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:23Z","lastTransitionTime":"2025-11-24T01:13:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.389083 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8pm69" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19dbf7ff-f684-4c57-803a-83b39e0705a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://090d3d275453f489254405c1197750888cc4d15d4251c6ce9b8a1a873319d5a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin
\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6k7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8pm69\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:23Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.399976 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vzkz4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a85ee7-3417-491b-a375-99f140cfb5de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67684d7d9617d917019f391c4031994da8f4ff110a4fe3d77dfa1bf76378dec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tts5\\\",
\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vzkz4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:23Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.411714 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:23Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.423482 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:23Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.435221 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4407e6a61e6a16826e5087eda8f3f2904933c9fb540f0a3c03e5414188248657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bcaff046f33f0df74919d9673b7e12e0609ea03f7686c1806ffb3c351965195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:23Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.446676 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://519ff5cd20c605ee21f4c1651c932618d593cb5ecf336f3504ebe1bd0f35adbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:23Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.455438 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1962128-02a0-46c3-82c2-5055c2aed0b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a249831bc7bdc4396bd5498c176a52332946f791c39b1f90eea4c157e61fc60c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16a2280b76aa63e33f859193da8353df1f8a4014ef51c0ef6350beb5fb217245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h8xzm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:23Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.465736 4755 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-8pm69" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19dbf7ff-f684-4c57-803a-83b39e0705a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://090d3d275453f489254405c1197750888cc4d15d4251c6ce9b8a1a873319d5a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6k7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-8pm69\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:23Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.475692 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vzkz4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a85ee7-3417-491b-a375-99f140cfb5de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67684d7d9617d917019f391c4031994da8f4ff110a4fe3d77dfa1bf76378dec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tts5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vzkz4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:23Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.475998 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.476034 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.476050 4755 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.476066 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.476077 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:23Z","lastTransitionTime":"2025-11-24T01:13:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.492820 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b438206-d27d-46e5-a2b5-91397c0d8856\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7575abb359776b9b306d821357a1147cadfd41c99d9528e644a3c1fa046d1df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11946902b549b820749fb6127935c82cc1e840cd84834d1803ffc03003c967d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\"
:\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34675c12ebbb549e0db1190d1d99a23ccef6e4a97b884bcdafdc629204238c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25ec710b1a2a3c639ced06a73586f11fbc050671bc059fdcaf49714f153445d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64cdda2f5b59ddeadc36986c875cf28e18a7ed1ce1822dff6002724f7e558215\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},
{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:23Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.504491 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"69352793-3db7-450c-98c2-5fd2040bb5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c1c133fe773aac40181aea15bd49ad2aee2f08caed8f7436c81ae3fc310300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b22922ca0ffb3c9c410a6ed82822ce5b3436d0e44993abe1fcb678959f243808\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5845caccbc89d64a7aaabedeec11a5d75fb7f25eb806cf8dceaf15263f30b4f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a9cb7e9aa093857c03bb4267ff3386e04e4de6e4025fdbfcb628db800d62745\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:23Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.516781 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27983572-2d9c-43d6-a7f0-445a0aec0531\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://612fe8e80230000f941c6c568234f32233a32e831a6d75556d9af68a1b790a7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e878
2bb4061601c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-b
inary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termin
ated\\\":{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zb6qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:23Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.530834 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"103d5a7c-0ec9-4c03-9f86-03dc3e01665c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40df6848b15d905bccead2547cc1dec836a69e38e80cb65de5dbee812f0081cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89d05f3773b4861d3bb33d7e6a6f09baaedb7510ed0b15703495110923a2f790\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78a49336f91d7c8aeb0d2861d1228d7b8eb478290bdef3b0662e26f9e1cddd57\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T01:13:12Z\\\",\\\"message\\\":\\\"GCM_SHA384' detected.\\\\nW1124 01:13:12.041119 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 01:13:12.041123 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 01:13:12.046149 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046144 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046262 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046271 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1124 01:13:12.046381 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1124 01:13:12.046396 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1124 01:13:12.046536 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\"\\\\nI1124 01:13:12.046556 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1763946791\\\\\\\\\\\\\\\" (2025-11-24 01:13:10 +0000 UTC to 2025-12-24 01:13:11 +0000 UTC (now=2025-11-24 01:13:12.046510889 +0000 UTC))\\\\\\\"\\\\nF1124 01:13:12.046649 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcfcdeaa4c2ae4a6f4bf2d79a64607361b9798549e8910a7374e4e3d17a16ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:23Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.544673 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://311d898b91dec73c482063aca5a79fefbc55b705d41d4da5fbbc21d8972deac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:23Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.558746 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:23Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.567049 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.567103 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:13:23 crc kubenswrapper[4755]: E1124 01:13:23.567280 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 01:13:23 crc kubenswrapper[4755]: E1124 01:13:23.567299 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 24 01:13:23 crc kubenswrapper[4755]: E1124 01:13:23.567312 4755 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 01:13:23 crc kubenswrapper[4755]: E1124 01:13:23.567318 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 01:13:23 crc kubenswrapper[4755]: E1124 01:13:23.567353 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 24 01:13:23 crc kubenswrapper[4755]: E1124 01:13:23.567368 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. 
No retries permitted until 2025-11-24 01:13:31.567352158 +0000 UTC m=+36.253417659 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 01:13:23 crc kubenswrapper[4755]: E1124 01:13:23.567369 4755 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 01:13:23 crc kubenswrapper[4755]: E1124 01:13:23.567411 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-24 01:13:31.567402359 +0000 UTC m=+36.253467860 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.569105 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dt8lz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58ea3cf6-1f18-428e-9b3f-6064671faf72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63e4ef9d52542d28688b962a86087f6b95a40625eefb13ad0460e2e728f5048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fc2wq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dt8lz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:23Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.584279 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.584320 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.584332 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.584346 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.584357 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:23Z","lastTransitionTime":"2025-11-24T01:13:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.589369 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":tru
e,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919
d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49da0175a68fbe6e5485a54e8b21dbfbef01484351410d51eabbb5dbbc8a682b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath
\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4ngwk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:23Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.668218 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:13:23 crc kubenswrapper[4755]: E1124 01:13:23.668394 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:13:31.668369691 +0000 UTC m=+36.354435192 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.668478 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.668513 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:13:23 crc kubenswrapper[4755]: E1124 01:13:23.668663 4755 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 01:13:23 crc kubenswrapper[4755]: E1124 01:13:23.668700 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-24 01:13:31.66869393 +0000 UTC m=+36.354759431 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 01:13:23 crc kubenswrapper[4755]: E1124 01:13:23.668728 4755 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 01:13:23 crc kubenswrapper[4755]: E1124 01:13:23.668851 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-24 01:13:31.668820964 +0000 UTC m=+36.354886505 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.686739 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.686777 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.686788 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.686804 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.686816 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:23Z","lastTransitionTime":"2025-11-24T01:13:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.789066 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.789103 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.789112 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.789126 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.789135 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:23Z","lastTransitionTime":"2025-11-24T01:13:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.891565 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.891635 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.891650 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.891666 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.891679 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:23Z","lastTransitionTime":"2025-11-24T01:13:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.993992 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.994030 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.994040 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.994058 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.994068 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:23Z","lastTransitionTime":"2025-11-24T01:13:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.996297 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:13:23 crc kubenswrapper[4755]: E1124 01:13:23.996406 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:13:23 crc kubenswrapper[4755]: I1124 01:13:23.996304 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:13:23 crc kubenswrapper[4755]: E1124 01:13:23.996488 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.096706 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.096745 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.096763 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.096779 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.096789 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:24Z","lastTransitionTime":"2025-11-24T01:13:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.199397 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.199438 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.199449 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.199469 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.199482 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:24Z","lastTransitionTime":"2025-11-24T01:13:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.201331 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.201389 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.229194 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.257658 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49da0175a68fbe6e5485a54e8b21dbfbef014843
51410d51eabbb5dbbc8a682b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4ngwk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:24Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.273506 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"103d5a7c-0ec9-4c03-9f86-03dc3e01665c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40df6848b15d905bccead2547cc1dec836a69e38e80cb65de5dbee812f0081cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89d05f3773b4861d3bb33d7e6a6f09baaedb7510ed0b15703495110923a2f790\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78a49336f91d7c8aeb0d2861d1228d7b8eb478290bdef3b0662e26f9e1cddd57\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T01:13:12Z\\\",\\\"message\\\":\\\"GCM_SHA384' detected.\\\\nW1124 01:13:12.041119 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 01:13:12.041123 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 01:13:12.046149 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046144 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046262 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046271 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1124 01:13:12.046381 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1124 01:13:12.046396 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1124 01:13:12.046536 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\"\\\\nI1124 01:13:12.046556 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1763946791\\\\\\\\\\\\\\\" (2025-11-24 01:13:10 +0000 UTC to 2025-12-24 01:13:11 +0000 UTC (now=2025-11-24 01:13:12.046510889 +0000 UTC))\\\\\\\"\\\\nF1124 01:13:12.046649 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcfcdeaa4c2ae4a6f4bf2d79a64607361b9798549e8910a7374e4e3d17a16ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:24Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.286671 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://311d898b91dec73c482063aca5a79fefbc55b705d41d4da5fbbc21d8972deac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:24Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.298718 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:24Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.301150 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.301177 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.301187 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.301200 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.301208 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:24Z","lastTransitionTime":"2025-11-24T01:13:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.310876 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dt8lz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58ea3cf6-1f18-428e-9b3f-6064671faf72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63e4ef9d52542d28688b962a86087f6b95a40625eefb13ad0460e2e728f5048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fc2wq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dt8lz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:24Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.322760 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:24Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.333901 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:24Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.344736 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1962128-02a0-46c3-82c2-5055c2aed0b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a249831bc7bdc4396bd5498c176a52332946f791c39b1f90eea4c157e61fc60c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16a2280b76aa63e33f859193da8353df1f8a4014ef51c0ef6350beb5fb217245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h8xzm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:24Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.355975 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8pm69" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19dbf7ff-f684-4c57-803a-83b39e0705a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://090d3d275453f489254405c1197750888cc4d15d4251c6ce9b8a1a873319d5a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-l
ib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6k7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8pm69\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:24Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.371383 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vzkz4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a85ee7-3417-491b-a375-99f140cfb5de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67684d7d9617d917019f391c4031994da8f4ff110a4fe3d77dfa1bf76378dec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tts5\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vzkz4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:24Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.388143 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4407e6a61e6a16826e5087eda8f3f2904933c9fb540f0a3c03e5414188248657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bcaff046f33f0df74919d9673b7e12e0609ea03f7686c1806ffb3c351965195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:24Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.400596 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://519ff5cd20c605ee21f4c1651c932618d593cb5ecf336f3504ebe1bd0f35adbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:24Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.403274 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.403317 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.403360 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.403380 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.403390 4755 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:24Z","lastTransitionTime":"2025-11-24T01:13:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.426590 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b438206-d27d-46e5-a2b5-91397c0d8856\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7575abb359776b9b306d821357a1147cadfd41c99d9528e644a3c1fa046d1df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11946902b549b820749fb6127935c82cc1e840cd84834d1803ffc03003c967d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34675c12ebbb549e0db1190d1d99a23ccef6e4a97b884bcdafdc629204238c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25ec710b1a2a3c639ced06a73586f11fbc050671bc059fdcaf49714f153445d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64cdda2f5b59ddeadc36986c875cf28e18a7ed1ce1822dff6002724f7e558215\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd
6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:24Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.441847 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"69352793-3db7-450c-98c2-5fd2040bb5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c1c133fe773aac40181aea15bd49ad2aee2f08caed8f7436c81ae3fc310300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b22922ca0ffb3c9c410a6ed82822ce5b3436d0e44993abe1fcb678959f243808\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5845caccbc89d64a7aaabedeec11a5d75fb7f25eb806cf8dceaf15263f30b4f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a9cb7e9aa093857c03bb4267ff3386e04e4de6e4025fdbfcb628db800d62745\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:24Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.462723 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27983572-2d9c-43d6-a7f0-445a0aec0531\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://612fe8e80230000f941c6c568234f32233a32e831a6d75556d9af68a1b790a7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e878
2bb4061601c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-b
inary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termin
ated\\\":{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zb6qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:24Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.505776 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.505847 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.505858 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.505874 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.505885 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:24Z","lastTransitionTime":"2025-11-24T01:13:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.608941 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.608990 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.609002 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.609019 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.609034 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:24Z","lastTransitionTime":"2025-11-24T01:13:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.711834 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.711881 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.711892 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.711913 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.711925 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:24Z","lastTransitionTime":"2025-11-24T01:13:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.813579 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.813628 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.813639 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.813652 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.813661 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:24Z","lastTransitionTime":"2025-11-24T01:13:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.916177 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.916238 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.916264 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.916288 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.916302 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:24Z","lastTransitionTime":"2025-11-24T01:13:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:24 crc kubenswrapper[4755]: I1124 01:13:24.995843 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:13:24 crc kubenswrapper[4755]: E1124 01:13:24.996073 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.020485 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.020555 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.020577 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.020599 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.020647 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:25Z","lastTransitionTime":"2025-11-24T01:13:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.123182 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.123246 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.123264 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.123289 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.123307 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:25Z","lastTransitionTime":"2025-11-24T01:13:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.225961 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.226484 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.226555 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.226653 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.226814 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:25Z","lastTransitionTime":"2025-11-24T01:13:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.329257 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.329292 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.329300 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.329315 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.329324 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:25Z","lastTransitionTime":"2025-11-24T01:13:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.375751 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.375784 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.375794 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.375807 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.375815 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:25Z","lastTransitionTime":"2025-11-24T01:13:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:25 crc kubenswrapper[4755]: E1124 01:13:25.386666 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"50bb41a3-bb20-461c-ba4c-72998ece87bc\\\",\\\"systemUUID\\\":\\\"cb6dbfa2-ee9a-4406-af65-1558b4c6cb25\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:25Z is after 
2025-08-24T17:21:41Z" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.390104 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.390130 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.390138 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.390148 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.390158 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:25Z","lastTransitionTime":"2025-11-24T01:13:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:25 crc kubenswrapper[4755]: E1124 01:13:25.404054 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"50bb41a3-bb20-461c-ba4c-72998ece87bc\\\",\\\"systemUUID\\\":\\\"cb6dbfa2-ee9a-4406-af65-1558b4c6cb25\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:25Z is after 
2025-08-24T17:21:41Z" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.408735 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.408880 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.408920 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.408969 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.408990 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:25Z","lastTransitionTime":"2025-11-24T01:13:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:25 crc kubenswrapper[4755]: E1124 01:13:25.426966 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"50bb41a3-bb20-461c-ba4c-72998ece87bc\\\",\\\"systemUUID\\\":\\\"cb6dbfa2-ee9a-4406-af65-1558b4c6cb25\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:25Z is after 
2025-08-24T17:21:41Z" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.430482 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.430528 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.430540 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.430561 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.430577 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:25Z","lastTransitionTime":"2025-11-24T01:13:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:25 crc kubenswrapper[4755]: E1124 01:13:25.442705 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"50bb41a3-bb20-461c-ba4c-72998ece87bc\\\",\\\"systemUUID\\\":\\\"cb6dbfa2-ee9a-4406-af65-1558b4c6cb25\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:25Z is after 
2025-08-24T17:21:41Z" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.446032 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.446088 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.446101 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.446120 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.446132 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:25Z","lastTransitionTime":"2025-11-24T01:13:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:25 crc kubenswrapper[4755]: E1124 01:13:25.460340 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"50bb41a3-bb20-461c-ba4c-72998ece87bc\\\",\\\"systemUUID\\\":\\\"cb6dbfa2-ee9a-4406-af65-1558b4c6cb25\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:25Z is after 
2025-08-24T17:21:41Z" Nov 24 01:13:25 crc kubenswrapper[4755]: E1124 01:13:25.460501 4755 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.462089 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.462212 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.462276 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.462704 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.462808 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:25Z","lastTransitionTime":"2025-11-24T01:13:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.565632 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.565675 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.565686 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.565702 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.565715 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:25Z","lastTransitionTime":"2025-11-24T01:13:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.669292 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.669331 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.669341 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.669356 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.669364 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:25Z","lastTransitionTime":"2025-11-24T01:13:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.771954 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.771992 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.772000 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.772014 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.772024 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:25Z","lastTransitionTime":"2025-11-24T01:13:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.874857 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.874893 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.874901 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.874919 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.874931 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:25Z","lastTransitionTime":"2025-11-24T01:13:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.977857 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.977902 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.977917 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.977938 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.977951 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:25Z","lastTransitionTime":"2025-11-24T01:13:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.996546 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:13:25 crc kubenswrapper[4755]: I1124 01:13:25.996581 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:13:25 crc kubenswrapper[4755]: E1124 01:13:25.996677 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:13:25 crc kubenswrapper[4755]: E1124 01:13:25.996777 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.008538 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://311d898b91dec73c482063aca5a79fefbc55b705d41d4da5fbbc21d8972deac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:26Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.019573 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:26Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.028834 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dt8lz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58ea3cf6-1f18-428e-9b3f-6064671faf72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63e4ef9d52542d28688b962a86087f6b95a40625eefb13ad0460e2e728f5048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fc2wq\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dt8lz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:26Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.045266 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49da0175a68fbe6e5485a54e8b21dbfbef014843
51410d51eabbb5dbbc8a682b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4ngwk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:26Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.061558 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"103d5a7c-0ec9-4c03-9f86-03dc3e01665c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40df6848b15d905bccead2547cc1dec836a69e38e80cb65de5dbee812f0081cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89d05f3773b4861d3bb33d7e6a6f09baaedb7510ed0b15703495110923a2f790\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78a49336f91d7c8aeb0d2861d1228d7b8eb478290bdef3b0662e26f9e1cddd57\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T01:13:12Z\\\",\\\"message\\\":\\\"GCM_SHA384' detected.\\\\nW1124 01:13:12.041119 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 01:13:12.041123 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 01:13:12.046149 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046144 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046262 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046271 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1124 01:13:12.046381 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1124 01:13:12.046396 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1124 01:13:12.046536 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\"\\\\nI1124 01:13:12.046556 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1763946791\\\\\\\\\\\\\\\" (2025-11-24 01:13:10 +0000 UTC to 2025-12-24 01:13:11 +0000 UTC (now=2025-11-24 01:13:12.046510889 +0000 UTC))\\\\\\\"\\\\nF1124 01:13:12.046649 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcfcdeaa4c2ae4a6f4bf2d79a64607361b9798549e8910a7374e4e3d17a16ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:26Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.075448 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:26Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.080253 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.080281 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.080288 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.080301 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.080310 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:26Z","lastTransitionTime":"2025-11-24T01:13:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.089236 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:26Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.103681 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4407e6a61e6a16826e5087eda8f3f2904933c9fb540f0a3c03e5414188248657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bcaff046f33f0df74919d9673b7e12e0609ea03f7686c1806ffb3c351965195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:26Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.118594 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://519ff5cd20c605ee21f4c1651c932618d593cb5ecf336f3504ebe1bd0f35adbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:26Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.130500 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1962128-02a0-46c3-82c2-5055c2aed0b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a249831bc7bdc4396bd5498c176a52332946f791c39b1f90eea4c157e61fc60c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16a2280b76aa63e33f859193da8353df1f8a4014ef51c0ef6350beb5fb217245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h8xzm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:26Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.146550 4755 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-8pm69" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19dbf7ff-f684-4c57-803a-83b39e0705a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://090d3d275453f489254405c1197750888cc4d15d4251c6ce9b8a1a873319d5a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6k7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-8pm69\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:26Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.158890 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vzkz4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a85ee7-3417-491b-a375-99f140cfb5de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67684d7d9617d917019f391c4031994da8f4ff110a4fe3d77dfa1bf76378dec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tts5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vzkz4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:26Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.182523 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.182559 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.182569 4755 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.182614 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.182630 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:26Z","lastTransitionTime":"2025-11-24T01:13:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.186048 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69352793-3db7-450c-98c2-5fd2040bb5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c1c133fe773aac40181aea15bd49ad2aee2f08caed8f7436c81ae3fc310300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b22922ca0ffb3c9c410a6ed82822ce5b3436d0e44993abe1fcb678959f243808\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5845caccbc89d64a7aaabedeec11a5d75fb7f25eb806cf8dceaf15263f30b4f1\\\",\\\"ima
ge\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a9cb7e9aa093857c03bb4267ff3386e04e4de6e4025fdbfcb628db800d62745\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:26Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.203129 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27983572-2d9c-43d6-a7f0-445a0aec0531\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://612fe8e80230000f941c6c568234f32233a32e831a6d75556d9af68a1b790a7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zb6qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:26Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.208162 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4ngwk_b3b1d3cb-ffbd-4034-832d-6577ccf2f780/ovnkube-controller/0.log" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.211418 4755 generic.go:334] "Generic (PLEG): container finished" 
podID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerID="49da0175a68fbe6e5485a54e8b21dbfbef01484351410d51eabbb5dbbc8a682b" exitCode=1 Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.211782 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" event={"ID":"b3b1d3cb-ffbd-4034-832d-6577ccf2f780","Type":"ContainerDied","Data":"49da0175a68fbe6e5485a54e8b21dbfbef01484351410d51eabbb5dbbc8a682b"} Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.212706 4755 scope.go:117] "RemoveContainer" containerID="49da0175a68fbe6e5485a54e8b21dbfbef01484351410d51eabbb5dbbc8a682b" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.231736 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b438206-d27d-46e5-a2b5-91397c0d8856\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7575abb359776b9b306d821357a1147cadfd41c99d9528e644a3c1fa046d1df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11946902b549b820749fb6127935c82cc1e840cd84834d1803ffc03003c967d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-
o://f34675c12ebbb549e0db1190d1d99a23ccef6e4a97b884bcdafdc629204238c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25ec710b1a2a3c639ced06a73586f11fbc050671bc059fdcaf49714f153445d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64cdda2f5b59ddeadc36986c875cf28e18a7ed1ce1822dff6002724f7e558215\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c3
00a0c78735a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:26Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.246673 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"103d5a7c-0ec9-4c03-9f86-03dc3e01665c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40df6848b15d905bccead2547cc1dec836a69e38e80cb65de5dbee812f0081cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89d05f3773b4861d3bb33d7e6a6f09baaedb7510ed0b15703495110923a2f790\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78a49336f91d7c8aeb0d2861d1228d7b8eb478290bdef3b0662e26f9e1cddd57\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T01:13:12Z\\\",\\\"message\\\":\\\"GCM_SHA384' detected.\\\\nW1124 01:13:12.041119 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 01:13:12.041123 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 01:13:12.046149 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046144 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046262 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046271 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1124 01:13:12.046381 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1124 01:13:12.046396 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1124 01:13:12.046536 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\"\\\\nI1124 01:13:12.046556 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1763946791\\\\\\\\\\\\\\\" (2025-11-24 01:13:10 +0000 UTC to 2025-12-24 01:13:11 +0000 UTC (now=2025-11-24 01:13:12.046510889 +0000 UTC))\\\\\\\"\\\\nF1124 01:13:12.046649 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcfcdeaa4c2ae4a6f4bf2d79a64607361b9798549e8910a7374e4e3d17a16ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:26Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.268105 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://311d898b91dec73c482063aca5a79fefbc55b705d41d4da5fbbc21d8972deac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:26Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.280920 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:26Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.286633 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.286703 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.286722 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.287325 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.287423 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:26Z","lastTransitionTime":"2025-11-24T01:13:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.291379 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dt8lz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58ea3cf6-1f18-428e-9b3f-6064671faf72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63e4ef9d52542d28688b962a86087f6b95a40625eefb13ad0460e2e728f5048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fc2wq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dt8lz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:26Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.311766 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49da0175a68fbe6e5485a54e8b21dbfbef01484351410d51eabbb5dbbc8a682b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49da0175a68fbe6e5485a54e8b21dbfbef01484351410d51eabbb5dbbc8a682b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T01:13:25Z\\\",\\\"message\\\":\\\"/informers/factory.go:160\\\\nI1124 01:13:25.955306 6051 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1124 01:13:25.955524 6051 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1124 01:13:25.955547 6051 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1124 01:13:25.955657 6051 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1124 01:13:25.955850 6051 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1124 01:13:25.955880 6051 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1124 01:13:25.955893 6051 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1124 01:13:25.956063 6051 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1124 01:13:25.956609 6051 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4ngwk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:26Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.326658 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:26Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.344156 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:26Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.359360 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4407e6a61e6a16826e5087eda8f3f2904933c9fb540f0a3c03e5414188248657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bcaff046f33f0df74919d9673b7e12e0609ea03f7686c1806ffb3c351965195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:26Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.371307 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://519ff5cd20c605ee21f4c1651c932618d593cb5ecf336f3504ebe1bd0f35adbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:26Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.380049 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1962128-02a0-46c3-82c2-5055c2aed0b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a249831bc7bdc4396bd5498c176a52332946f791c39b1f90eea4c157e61fc60c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16a2280b76aa63e33f859193da8353df1f8a4014ef51c0ef6350beb5fb217245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h8xzm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:26Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.391072 4755 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.391106 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.391116 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.391134 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.391144 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:26Z","lastTransitionTime":"2025-11-24T01:13:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.395080 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8pm69" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19dbf7ff-f684-4c57-803a-83b39e0705a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://090d3d275453f489254405c1197750888cc4d15d4251c6ce9b8a1a873319d5a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin
\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6k7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8pm69\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:26Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.403850 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vzkz4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a85ee7-3417-491b-a375-99f140cfb5de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67684d7d9617d917019f391c4031994da8f4ff110a4fe3d77dfa1bf76378dec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tts5\\\",
\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vzkz4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:26Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.426635 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b438206-d27d-46e5-a2b5-91397c0d8856\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7575abb359776b9b306d821357a1147cadfd41c99d9528e644a3c1fa046d1df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11946902b549b820749fb6127935c82cc1e840cd84834d1803ffc03003c967d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib
/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34675c12ebbb549e0db1190d1d99a23ccef6e4a97b884bcdafdc629204238c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25ec710b1a2a3c639ced06a73586f11fbc050671bc059fdcaf49714f153445d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64cdda2f5b59ddeadc36986c875cf28e18a7ed1ce1822dff6002724f7e558215\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containe
rID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:26Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.438434 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"69352793-3db7-450c-98c2-5fd2040bb5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c1c133fe773aac40181aea15bd49ad2aee2f08caed8f7436c81ae3fc310300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b22922ca0ffb3c9c410a6ed82822ce5b3436d0e44993abe1fcb678959f243808\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5845caccbc89d64a7aaabedeec11a5d75fb7f25eb806cf8dceaf15263f30b4f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a9cb7e9aa093857c03bb4267ff3386e04e4de6e4025fdbfcb628db800d62745\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:26Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.451198 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27983572-2d9c-43d6-a7f0-445a0aec0531\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://612fe8e80230000f941c6c568234f32233a32e831a6d75556d9af68a1b790a7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e878
2bb4061601c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-b
inary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termin
ated\\\":{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zb6qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:26Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.493167 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.493248 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.493261 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.493276 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.493288 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:26Z","lastTransitionTime":"2025-11-24T01:13:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.595334 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.595377 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.595386 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.595411 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.595425 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:26Z","lastTransitionTime":"2025-11-24T01:13:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.697503 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.697532 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.697539 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.697552 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.697560 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:26Z","lastTransitionTime":"2025-11-24T01:13:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.799368 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.799406 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.799417 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.799434 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.799445 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:26Z","lastTransitionTime":"2025-11-24T01:13:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.902099 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.902139 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.902148 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.902163 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.902172 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:26Z","lastTransitionTime":"2025-11-24T01:13:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:26 crc kubenswrapper[4755]: I1124 01:13:26.995854 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:13:26 crc kubenswrapper[4755]: E1124 01:13:26.995992 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.004032 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.004078 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.004088 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.004104 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.004116 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:27Z","lastTransitionTime":"2025-11-24T01:13:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.107030 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.107073 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.107083 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.107098 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.107110 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:27Z","lastTransitionTime":"2025-11-24T01:13:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.209513 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.209573 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.209590 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.209637 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.209651 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:27Z","lastTransitionTime":"2025-11-24T01:13:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.215332 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4ngwk_b3b1d3cb-ffbd-4034-832d-6577ccf2f780/ovnkube-controller/1.log" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.215921 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4ngwk_b3b1d3cb-ffbd-4034-832d-6577ccf2f780/ovnkube-controller/0.log" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.218881 4755 generic.go:334] "Generic (PLEG): container finished" podID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerID="7d0548d371aaacdf149dc5ab3622d89462cb172cb3686889840db43146fe76f4" exitCode=1 Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.218911 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" event={"ID":"b3b1d3cb-ffbd-4034-832d-6577ccf2f780","Type":"ContainerDied","Data":"7d0548d371aaacdf149dc5ab3622d89462cb172cb3686889840db43146fe76f4"} Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.218977 4755 scope.go:117] "RemoveContainer" containerID="49da0175a68fbe6e5485a54e8b21dbfbef01484351410d51eabbb5dbbc8a682b" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.219641 4755 scope.go:117] "RemoveContainer" containerID="7d0548d371aaacdf149dc5ab3622d89462cb172cb3686889840db43146fe76f4" Nov 24 01:13:27 crc kubenswrapper[4755]: E1124 01:13:27.219800 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-4ngwk_openshift-ovn-kubernetes(b3b1d3cb-ffbd-4034-832d-6577ccf2f780)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.236329 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1962128-02a0-46c3-82c2-5055c2aed0b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a249831bc7bdc4396bd5498c176a52332946f791c39b1f90eea4c157e61fc60c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16a2280b76aa63e33f859193da8353df1f8a4014ef51c0ef6350beb5fb217245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h8xzm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:27Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.248408 4755 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-8pm69" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19dbf7ff-f684-4c57-803a-83b39e0705a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://090d3d275453f489254405c1197750888cc4d15d4251c6ce9b8a1a873319d5a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6k7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-8pm69\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:27Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.257796 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vzkz4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a85ee7-3417-491b-a375-99f140cfb5de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67684d7d9617d917019f391c4031994da8f4ff110a4fe3d77dfa1bf76378dec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tts5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vzkz4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:27Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.272054 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4407e6a61e6a16826e5087eda8f3f2904933c9fb540f0a3c03e5414188248657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bcaff046f33f0df74919d9673b7e12e0609ea03f7686c1806ffb3c351965195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:27Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.286281 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://519ff5cd20c605ee21f4c1651c932618d593cb5ecf336f3504ebe1bd0f35adbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:27Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.312227 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.312266 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.312283 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.312302 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.312312 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:27Z","lastTransitionTime":"2025-11-24T01:13:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.316636 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b438206-d27d-46e5-a2b5-91397c0d8856\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7575abb359776b9b306d821357a1147cadfd41c99d9528e644a3c1fa046d1df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11946902b549b820749fb6127935c82cc1e840cd84834d1803ffc03003c967d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34675c12ebbb549e0db1190d1d99a23ccef6e4a97b884bcdafdc629204238c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25ec710b1a2a3c639ced06a73586f11fbc050671bc059fdcaf49714f153445d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64cdda2f5b59ddeadc36986c875cf28e18a7ed1ce1822dff6002724f7e558215\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:27Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.332653 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"69352793-3db7-450c-98c2-5fd2040bb5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c1c133fe773aac40181aea15bd49ad2aee2f08caed8f7436c81ae3fc310300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b22922ca0ffb3c9c410a6ed82822ce5b3436d0e44993abe1fcb678959f243808\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5845caccbc89d64a7aaabedeec11a5d75fb7f25eb806cf8dceaf15263f30b4f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a9cb7e9aa093857c03bb4267ff3386e04e4de6e4025fdbfcb628db800d62745\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:27Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.349328 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27983572-2d9c-43d6-a7f0-445a0aec0531\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://612fe8e80230000f941c6c568234f32233a32e831a6d75556d9af68a1b790a7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e878
2bb4061601c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-b
inary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termin
ated\\\":{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zb6qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:27Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.370701 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d0548d371aaacdf149dc5ab3622d89462cb172c
b3686889840db43146fe76f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49da0175a68fbe6e5485a54e8b21dbfbef01484351410d51eabbb5dbbc8a682b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T01:13:25Z\\\",\\\"message\\\":\\\"/informers/factory.go:160\\\\nI1124 01:13:25.955306 6051 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1124 01:13:25.955524 6051 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1124 01:13:25.955547 6051 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1124 01:13:25.955657 6051 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1124 01:13:25.955850 6051 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1124 01:13:25.955880 6051 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1124 01:13:25.955893 6051 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1124 01:13:25.956063 6051 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1124 01:13:25.956609 6051 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:23Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d0548d371aaacdf149dc5ab3622d89462cb172cb3686889840db43146fe76f4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T01:13:27Z\\\",\\\"message\\\":\\\"ce-id-ver:9d751cbb-f2e2-430d-9754-c882a5e924a5 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:3b 10.217.0.59]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {960d98b2-dc64-4e93-a4b6-9b19847af71e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 01:13:26.996372 6172 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 01:13:26.996031 6172 ovn.go:134] Ensuring zone local for Pod openshift-etcd/etcd-crc in node crc\\\\nF1124 01:13:26.996342 6172 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller 
initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIP
s\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4ngwk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:27Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.382951 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"103d5a7c-0ec9-4c03-9f86-03dc3e01665c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40df6848b15d905bccead2547cc1dec836a69e38e80cb65de5dbee812f0081cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89d05f3773b4861d3bb33d7e6a6f09baaedb7510ed0b15703495110923a2f790\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78a49336f91d7c8aeb0d2861d1228d7b8eb478290bdef3b0662e26f9e1cddd57\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T01:13:12Z\\\",\\\"message\\\":\\\"GCM_SHA384' detected.\\\\nW1124 01:13:12.041119 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 01:13:12.041123 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 01:13:12.046149 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046144 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046262 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046271 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1124 01:13:12.046381 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1124 01:13:12.046396 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1124 01:13:12.046536 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\"\\\\nI1124 01:13:12.046556 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1763946791\\\\\\\\\\\\\\\" (2025-11-24 01:13:10 +0000 UTC to 2025-12-24 01:13:11 +0000 UTC (now=2025-11-24 01:13:12.046510889 +0000 UTC))\\\\\\\"\\\\nF1124 01:13:12.046649 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcfcdeaa4c2ae4a6f4bf2d79a64607361b9798549e8910a7374e4e3d17a16ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:27Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.396029 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://311d898b91dec73c482063aca5a79fefbc55b705d41d4da5fbbc21d8972deac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:27Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.408330 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:27Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.415078 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.415117 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.415132 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.415148 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.415166 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:27Z","lastTransitionTime":"2025-11-24T01:13:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.419036 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dt8lz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58ea3cf6-1f18-428e-9b3f-6064671faf72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63e4ef9d52542d28688b962a86087f6b95a40625eefb13ad0460e2e728f5048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fc2wq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dt8lz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:27Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.431761 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:27Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.448074 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:27Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.517842 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.517918 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.517941 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.517984 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.518004 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:27Z","lastTransitionTime":"2025-11-24T01:13:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.621094 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.621134 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.621145 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.621167 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.621178 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:27Z","lastTransitionTime":"2025-11-24T01:13:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.724682 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.724736 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.724751 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.724772 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.724816 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:27Z","lastTransitionTime":"2025-11-24T01:13:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.751818 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7wvfc"] Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.752258 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7wvfc" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.755284 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.756160 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.767290 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"103d5a7c-0ec9-4c03-9f86-03dc3e01665c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40df6848b15d905bccead2547cc1dec836a69e38e80cb65de5dbee812f0081cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89d05f3773b4861d3bb33d7e6a6f09baaedb7510ed0b15703495110923a2f790\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78a49336f91d7c8aeb0d2861d1228d7b8eb478290bdef3b0662e26f9e1cddd57\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T01:13:12Z\\\",\\\"message\\\":\\\"GCM_SHA384' detected.\\\\nW1124 01:13:12.041119 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 01:13:12.041123 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 01:13:12.046149 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046144 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046262 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046271 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1124 01:13:12.046381 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1124 01:13:12.046396 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1124 01:13:12.046536 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\"\\\\nI1124 01:13:12.046556 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1763946791\\\\\\\\\\\\\\\" (2025-11-24 01:13:10 +0000 UTC to 2025-12-24 01:13:11 +0000 UTC (now=2025-11-24 01:13:12.046510889 +0000 UTC))\\\\\\\"\\\\nF1124 01:13:12.046649 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcfcdeaa4c2ae4a6f4bf2d79a64607361b9798549e8910a7374e4e3d17a16ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:27Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.781276 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://311d898b91dec73c482063aca5a79fefbc55b705d41d4da5fbbc21d8972deac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:27Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.792445 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:27Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.801290 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dt8lz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58ea3cf6-1f18-428e-9b3f-6064671faf72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63e4ef9d52542d28688b962a86087f6b95a40625eefb13ad0460e2e728f5048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fc2wq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dt8lz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:27Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.818350 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d0548d371aaacdf149dc5ab3622d89462cb172cb3686889840db43146fe76f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49da0175a68fbe6e5485a54e8b21dbfbef01484351410d51eabbb5dbbc8a682b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T01:13:25Z\\\",\\\"message\\\":\\\"/informers/factory.go:160\\\\nI1124 01:13:25.955306 6051 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1124 01:13:25.955524 6051 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1124 01:13:25.955547 6051 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1124 01:13:25.955657 6051 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1124 01:13:25.955850 6051 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1124 01:13:25.955880 6051 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1124 01:13:25.955893 6051 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1124 01:13:25.956063 6051 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1124 01:13:25.956609 6051 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:23Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d0548d371aaacdf149dc5ab3622d89462cb172cb3686889840db43146fe76f4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T01:13:27Z\\\",\\\"message\\\":\\\"ce-id-ver:9d751cbb-f2e2-430d-9754-c882a5e924a5 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:3b 10.217.0.59]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {960d98b2-dc64-4e93-a4b6-9b19847af71e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 01:13:26.996372 6172 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 01:13:26.996031 6172 ovn.go:134] Ensuring zone local for Pod openshift-etcd/etcd-crc in node crc\\\\nF1124 01:13:26.996342 6172 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error 
occurred\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef
0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4ngwk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:27Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.827153 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.827359 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.827459 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.827553 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.827662 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:27Z","lastTransitionTime":"2025-11-24T01:13:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.830963 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:27Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.843754 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:27Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.857035 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8pm69" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19dbf7ff-f684-4c57-803a-83b39e0705a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://090d3d275453f489254405c1197750888cc4d15d4251c6ce9b8a1a873319d5a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\
\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6k7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8pm69\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:27Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.867247 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vzkz4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a85ee7-3417-491b-a375-99f140cfb5de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67684d7d9617d917019f391c4031994da8f4ff110a4fe3d77dfa1bf76378dec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tts5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vzkz4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:27Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.878713 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7wvfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a32df1d1-89b4-4a22-a07f-2d7ecd2e265b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2pjf7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2pjf7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:27Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7wvfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:27Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.891740 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4407e6a61e6a16826e5087eda8f3f2904933c9fb540f0a3c03e5414188248657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bcaff046f33f0df74919d9673b7e12e0609ea03f7686c1806ffb3c351965195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:27Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.903591 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://519ff5cd20c605ee21f4c1651c932618d593cb5ecf336f3504ebe1bd0f35adbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:27Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.911020 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/a32df1d1-89b4-4a22-a07f-2d7ecd2e265b-env-overrides\") pod \"ovnkube-control-plane-749d76644c-7wvfc\" (UID: \"a32df1d1-89b4-4a22-a07f-2d7ecd2e265b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7wvfc" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.911197 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/a32df1d1-89b4-4a22-a07f-2d7ecd2e265b-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-7wvfc\" (UID: \"a32df1d1-89b4-4a22-a07f-2d7ecd2e265b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7wvfc" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.911302 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/a32df1d1-89b4-4a22-a07f-2d7ecd2e265b-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-7wvfc\" (UID: \"a32df1d1-89b4-4a22-a07f-2d7ecd2e265b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7wvfc" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.911412 4755 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2pjf7\" (UniqueName: \"kubernetes.io/projected/a32df1d1-89b4-4a22-a07f-2d7ecd2e265b-kube-api-access-2pjf7\") pod \"ovnkube-control-plane-749d76644c-7wvfc\" (UID: \"a32df1d1-89b4-4a22-a07f-2d7ecd2e265b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7wvfc" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.916157 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1962128-02a0-46c3-82c2-5055c2aed0b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a249831bc7bdc4396bd5498c176a52332946f791c39b1f90eea4c157e61fc60c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16a2280b76aa63e33f859193da8353df1f8a4014ef51c0ef6350beb5fb217245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h8xzm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:27Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.937825 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.938268 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.938370 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.938477 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.938578 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:27Z","lastTransitionTime":"2025-11-24T01:13:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.941190 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b438206-d27d-46e5-a2b5-91397c0d8856\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7575abb359776b9b306d821357a1147cadfd41c99d9528e644a3c1fa046d1df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11946902b549b820749fb6127935c82cc1e840cd84834d1803ffc03003c967d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34675c12ebbb549e0db1190d1d99a23ccef6e4a97b884bcdafdc629204238c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25ec710b1a2a3c639ced06a73586f11fbc05067
1bc059fdcaf49714f153445d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64cdda2f5b59ddeadc36986c875cf28e18a7ed1ce1822dff6002724f7e558215\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:27Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.952373 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69352793-3db7-450c-98c2-5fd2040bb5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c1c133fe773aac40181aea15bd49ad2aee2f08caed8f7436c81ae3fc310300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b22922ca0ffb3c9c410a6ed82822ce5b3436d0e44993abe1fcb678959f243808\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5845caccbc89d64a7aaabedeec11a5d75fb7f25eb806cf8dceaf15263f30b4f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a9cb7e9aa093857c03bb4267ff3386e04e4de6e4025fdbfcb628db800d62745\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:27Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.966546 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27983572-2d9c-43d6-a7f0-445a0aec0531\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://612fe8e80230000f941c6c568234f32233a32e831a6d75556d9af68a1b790a7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zb6qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:27Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.995922 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:13:27 crc kubenswrapper[4755]: E1124 01:13:27.996135 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:13:27 crc kubenswrapper[4755]: I1124 01:13:27.996664 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:13:27 crc kubenswrapper[4755]: E1124 01:13:27.996775 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.012665 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/a32df1d1-89b4-4a22-a07f-2d7ecd2e265b-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-7wvfc\" (UID: \"a32df1d1-89b4-4a22-a07f-2d7ecd2e265b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7wvfc" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.012764 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2pjf7\" (UniqueName: \"kubernetes.io/projected/a32df1d1-89b4-4a22-a07f-2d7ecd2e265b-kube-api-access-2pjf7\") pod \"ovnkube-control-plane-749d76644c-7wvfc\" (UID: \"a32df1d1-89b4-4a22-a07f-2d7ecd2e265b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7wvfc" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.012801 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/a32df1d1-89b4-4a22-a07f-2d7ecd2e265b-env-overrides\") pod \"ovnkube-control-plane-749d76644c-7wvfc\" (UID: \"a32df1d1-89b4-4a22-a07f-2d7ecd2e265b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7wvfc" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.012826 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/a32df1d1-89b4-4a22-a07f-2d7ecd2e265b-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-7wvfc\" (UID: \"a32df1d1-89b4-4a22-a07f-2d7ecd2e265b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7wvfc" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.013587 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/a32df1d1-89b4-4a22-a07f-2d7ecd2e265b-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-7wvfc\" (UID: \"a32df1d1-89b4-4a22-a07f-2d7ecd2e265b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7wvfc" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.013906 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"env-overrides\" (UniqueName: \"kubernetes.io/configmap/a32df1d1-89b4-4a22-a07f-2d7ecd2e265b-env-overrides\") pod \"ovnkube-control-plane-749d76644c-7wvfc\" (UID: \"a32df1d1-89b4-4a22-a07f-2d7ecd2e265b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7wvfc" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.017642 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/a32df1d1-89b4-4a22-a07f-2d7ecd2e265b-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-7wvfc\" (UID: \"a32df1d1-89b4-4a22-a07f-2d7ecd2e265b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7wvfc" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.029977 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2pjf7\" (UniqueName: \"kubernetes.io/projected/a32df1d1-89b4-4a22-a07f-2d7ecd2e265b-kube-api-access-2pjf7\") pod \"ovnkube-control-plane-749d76644c-7wvfc\" (UID: \"a32df1d1-89b4-4a22-a07f-2d7ecd2e265b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7wvfc" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.041697 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.041734 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.041743 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.041758 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.041769 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:28Z","lastTransitionTime":"2025-11-24T01:13:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.064955 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7wvfc" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.144368 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.144430 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.144447 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.144471 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.144489 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:28Z","lastTransitionTime":"2025-11-24T01:13:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.223715 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7wvfc" event={"ID":"a32df1d1-89b4-4a22-a07f-2d7ecd2e265b","Type":"ContainerStarted","Data":"5d4f9572849375f42b0c68bc092ba31d7ff18e378deb43beedcdaa65019d1dee"} Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.225525 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4ngwk_b3b1d3cb-ffbd-4034-832d-6577ccf2f780/ovnkube-controller/1.log" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.228817 4755 scope.go:117] "RemoveContainer" containerID="7d0548d371aaacdf149dc5ab3622d89462cb172cb3686889840db43146fe76f4" Nov 24 01:13:28 crc kubenswrapper[4755]: E1124 01:13:28.228950 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-4ngwk_openshift-ovn-kubernetes(b3b1d3cb-ffbd-4034-832d-6577ccf2f780)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.241469 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4407e6a61e6a16826e5087eda8f3f2904933c9fb540f0a3c03e5414188248657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bcaff046f33f0df74919d9673b7e12e0609ea03f7686c1806ffb3c351965195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:28Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.246724 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.246759 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.246771 4755 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.246786 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.246797 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:28Z","lastTransitionTime":"2025-11-24T01:13:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.252797 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://519ff5cd20c605ee21f4c1651c932618d593cb5ecf336f3504ebe1bd0f35adbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:28Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.262667 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1962128-02a0-46c3-82c2-5055c2aed0b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a249831bc7bdc4396bd5498c176a52332946f791c39b1f90eea4c157e61fc60c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16a2280b76aa63e33f859193da8353df1f8a4014ef51c0ef6350beb5fb217245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h8xzm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:28Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.276496 4755 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-8pm69" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19dbf7ff-f684-4c57-803a-83b39e0705a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://090d3d275453f489254405c1197750888cc4d15d4251c6ce9b8a1a873319d5a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6k7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-8pm69\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:28Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.293590 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vzkz4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a85ee7-3417-491b-a375-99f140cfb5de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67684d7d9617d917019f391c4031994da8f4ff110a4fe3d77dfa1bf76378dec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tts5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vzkz4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:28Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.303566 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7wvfc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a32df1d1-89b4-4a22-a07f-2d7ecd2e265b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2pjf7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2pjf7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:27Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7wvfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:28Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.314834 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69352793-3db7-450c-98c2-5fd2040bb5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c1c133fe773aac40181aea15bd49ad2aee2f08caed8f7436c81ae3fc310300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b22922ca0ffb3c9c410a6ed82822ce5b3436d0e44993abe1fcb678959f243808\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5845caccbc89d64a7aaabedeec11a5d75fb7f25eb806cf8dceaf15263f30b4f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a9cb7e9aa093857c03bb4267ff3386e04e4de6e4025fdbfcb628db800d62745\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cl
uster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:28Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.331542 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27983572-2d9c-43d6-a7f0-445a0aec0531\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://612fe8e80230000f941c6c568234f32233a32e831a6d75556d9af68a1b790a7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c39980
5b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoin
t\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":fal
se,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zb6qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:28Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.349756 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.349795 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.349803 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.349819 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.349830 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:28Z","lastTransitionTime":"2025-11-24T01:13:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.351874 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b438206-d27d-46e5-a2b5-91397c0d8856\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7575abb359776b9b306d821357a1147cadfd41c99d9528e644a3c1fa046d1df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11946902b549b820749fb6127935c82cc1e840cd84834d1803ffc03003c967d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34675c12ebbb549e0db1190d1d99a23ccef6e4a97b884bcdafdc629204238c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25ec710b1a2a3c639ced06a73586f11fbc050671bc059fdcaf49714f153445d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64cdda2f5b59ddeadc36986c875cf28e18a7ed1ce1822dff6002724f7e558215\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:28Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.364051 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://311d898b91dec73c482063aca5a79fefbc55b705d41d4da5fbbc21d8972deac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:28Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.376349 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:28Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.387300 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dt8lz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58ea3cf6-1f18-428e-9b3f-6064671faf72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63e4ef9d52542d28688b962a86087f6b95a40625eefb13ad0460e2e728f5048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fc2wq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dt8lz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:28Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.403450 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d0548d371aaacdf149dc5ab3622d89462cb172cb3686889840db43146fe76f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d0548d371aaacdf149dc5ab3622d89462cb172cb3686889840db43146fe76f4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T01:13:27Z\\\",\\\"message\\\":\\\"ce-id-ver:9d751cbb-f2e2-430d-9754-c882a5e924a5 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:3b 10.217.0.59]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {960d98b2-dc64-4e93-a4b6-9b19847af71e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 01:13:26.996372 6172 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 01:13:26.996031 6172 ovn.go:134] Ensuring zone local for Pod openshift-etcd/etcd-crc in node crc\\\\nF1124 01:13:26.996342 6172 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error 
occurred\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:26Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-4ngwk_openshift-ovn-kubernetes(b3b1d3cb-ffbd-4034-832d-6577ccf2f780)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4ngwk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:28Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.413843 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"103d5a7c-0ec9-4c03-9f86-03dc3e01665c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40df6848b15d905bccead2547cc1dec836a69e38e80cb65de5dbee812f0081cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89d05f3773b4861d3bb33d7e6a6f09baaedb7510ed0b15703495110923a2f790\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78a49336f91d7c8aeb0d2861d1228d7b8eb478290bdef3b0662e26f9e1cddd57\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T01:13:12Z\\\",\\\"message\\\":\\\"GCM_SHA384' detected.\\\\nW1124 01:13:12.041119 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 01:13:12.041123 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 01:13:12.046149 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046144 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046262 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046271 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1124 01:13:12.046381 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1124 01:13:12.046396 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1124 01:13:12.046536 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\"\\\\nI1124 01:13:12.046556 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1763946791\\\\\\\\\\\\\\\" (2025-11-24 01:13:10 +0000 UTC to 2025-12-24 01:13:11 +0000 UTC (now=2025-11-24 01:13:12.046510889 +0000 UTC))\\\\\\\"\\\\nF1124 01:13:12.046649 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcfcdeaa4c2ae4a6f4bf2d79a64607361b9798549e8910a7374e4e3d17a16ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:28Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.425536 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:28Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.438541 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:28Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.452712 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.452746 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.452757 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.452792 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.452805 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:28Z","lastTransitionTime":"2025-11-24T01:13:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.555439 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.555845 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.555978 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.556250 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.556371 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:28Z","lastTransitionTime":"2025-11-24T01:13:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.661131 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.661546 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.661741 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.661909 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.662069 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:28Z","lastTransitionTime":"2025-11-24T01:13:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.765496 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.765528 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.765539 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.765555 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.765568 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:28Z","lastTransitionTime":"2025-11-24T01:13:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.868596 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.868880 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.868888 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.868901 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.868910 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:28Z","lastTransitionTime":"2025-11-24T01:13:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.971067 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.971096 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.971105 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.971118 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.971126 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:28Z","lastTransitionTime":"2025-11-24T01:13:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:28 crc kubenswrapper[4755]: I1124 01:13:28.996362 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:13:28 crc kubenswrapper[4755]: E1124 01:13:28.996496 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.073458 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.073519 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.073536 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.073560 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.073579 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:29Z","lastTransitionTime":"2025-11-24T01:13:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.175658 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.175701 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.175710 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.175731 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.175741 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:29Z","lastTransitionTime":"2025-11-24T01:13:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.232015 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7wvfc" event={"ID":"a32df1d1-89b4-4a22-a07f-2d7ecd2e265b","Type":"ContainerStarted","Data":"ff945c5b4cb1534cb8e31d7e471ebe2f47fe04ea46f09fc53223a72f4c48de5f"} Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.232060 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7wvfc" event={"ID":"a32df1d1-89b4-4a22-a07f-2d7ecd2e265b","Type":"ContainerStarted","Data":"1f7432e293e8131ca1bbbc7281982a639b41e995fc2210e6351a0e87cf719700"} Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.244240 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7wvfc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a32df1d1-89b4-4a22-a07f-2d7ecd2e265b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f7432e293e8131ca1bbbc7281982a639b41e995fc2210e6351a0e87cf719700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2pjf7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff945c5b4cb1534cb8e31d7e471ebe2f47fe04ea46f09fc53223a72f4c48de5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2pjf7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:27Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7wvfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:29Z is after 2025-08-24T17:21:41Z" Nov 24 
01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.256670 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4407e6a61e6a16826e5087eda8f3f2904933c9fb540f0a3c03e5414188248657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bcaff046f33f0df74919d9673b7e12e0609ea03f7686c1806ffb3c351965195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:29Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.271000 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://519ff5cd20c605ee21f4c1651c932618d593cb5ecf336f3504ebe1bd0f35adbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:29Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.277655 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.277693 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.277704 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.277720 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.277732 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:29Z","lastTransitionTime":"2025-11-24T01:13:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.280990 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1962128-02a0-46c3-82c2-5055c2aed0b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a249831bc7bdc4396bd5498c176a52332946f791c39b1f90eea4c157e61fc60c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16a2280b76aa63e33f859193da8353df1f8a4014ef51c0ef6350beb5fb217245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h8xzm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:29Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.292865 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8pm69" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19dbf7ff-f684-4c57-803a-83b39e0705a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://090d3d275453f489254405c1197750888cc4d15d4251c6ce9b8a1a873319d5a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6k7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8pm69\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:29Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.302211 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vzkz4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a85ee7-3417-491b-a375-99f140cfb5de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67684d7d9617d917019f391c4031994da8f4ff110a4fe3d77dfa1bf76378dec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tts5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vzkz4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:29Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.319506 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b438206-d27d-46e5-a2b5-91397c0d8856\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7575abb359776b9b306d821357a1147cadfd41c99d9528e644a3c1fa046d1df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11946902b549b820749fb6127935c82cc1e840cd84834d1803ffc03003c967d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34675c12ebbb549e0db1190d1d99a23ccef6e4a97b884bcdafdc629204238c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25ec710b1a2a3c639ced06a73586f11fbc05067
1bc059fdcaf49714f153445d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64cdda2f5b59ddeadc36986c875cf28e18a7ed1ce1822dff6002724f7e558215\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:29Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.332370 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69352793-3db7-450c-98c2-5fd2040bb5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c1c133fe773aac40181aea15bd49ad2aee2f08caed8f7436c81ae3fc310300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b22922ca0ffb3c9c410a6ed82822ce5b3436d0e44993abe1fcb678959f243808\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5845caccbc89d64a7aaabedeec11a5d75fb7f25eb806cf8dceaf15263f30b4f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a9cb7e9aa093857c03bb4267ff3386e04e4de6e4025fdbfcb628db800d62745\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:29Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.346982 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27983572-2d9c-43d6-a7f0-445a0aec0531\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://612fe8e80230000f941c6c568234f32233a32e831a6d75556d9af68a1b790a7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zb6qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:29Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.368596 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"103d5a7c-0ec9-4c03-9f86-03dc3e01665c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40df6848b15d905bccead2547cc1dec836a69e38e80cb65de5dbee812f0081cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89d05f3773b4861d3bb33d7e6a6f09baaedb7510ed0b15703495110923a2f790\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78a49336f91d7c8aeb0d2861d1228d7b8eb478290bdef3b0662e26f9e1cddd57\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T01:13:12Z\\\",\\\"message\\\":\\\"GCM_SHA384' detected.\\\\nW1124 01:13:12.041119 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 01:13:12.041123 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 01:13:12.046149 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046144 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046262 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046271 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1124 01:13:12.046381 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1124 01:13:12.046396 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1124 01:13:12.046536 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\"\\\\nI1124 01:13:12.046556 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1763946791\\\\\\\\\\\\\\\" (2025-11-24 01:13:10 +0000 UTC to 2025-12-24 01:13:11 +0000 UTC (now=2025-11-24 01:13:12.046510889 +0000 UTC))\\\\\\\"\\\\nF1124 01:13:12.046649 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcfcdeaa4c2ae4a6f4bf2d79a64607361b9798549e8910a7374e4e3d17a16ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:29Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.380503 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.380556 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.380571 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.380591 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.380627 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:29Z","lastTransitionTime":"2025-11-24T01:13:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.383272 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://311d898b91dec73c482063aca5a79fefbc55b705d41d4da5fbbc21d8972deac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:29Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.400722 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:29Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.410847 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dt8lz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58ea3cf6-1f18-428e-9b3f-6064671faf72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63e4ef9d52542d28688b962a86087f6b95a40625eefb13ad0460e2e728f5048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fc2wq\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dt8lz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:29Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.427420 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d0548d371aaacdf149dc5ab3622d89462cb172c
b3686889840db43146fe76f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d0548d371aaacdf149dc5ab3622d89462cb172cb3686889840db43146fe76f4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T01:13:27Z\\\",\\\"message\\\":\\\"ce-id-ver:9d751cbb-f2e2-430d-9754-c882a5e924a5 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:3b 10.217.0.59]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {960d98b2-dc64-4e93-a4b6-9b19847af71e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 01:13:26.996372 6172 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 01:13:26.996031 6172 ovn.go:134] Ensuring zone local for Pod openshift-etcd/etcd-crc in node crc\\\\nF1124 01:13:26.996342 6172 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:26Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-4ngwk_openshift-ovn-kubernetes(b3b1d3cb-ffbd-4034-832d-6577ccf2f780)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4ngwk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:29Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.441018 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:29Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.453285 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:29Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.483270 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.483314 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.483326 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.483342 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.483353 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:29Z","lastTransitionTime":"2025-11-24T01:13:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.586689 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.586731 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.586741 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.586757 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.586773 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:29Z","lastTransitionTime":"2025-11-24T01:13:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.590542 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-9cl8m"] Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.591496 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:13:29 crc kubenswrapper[4755]: E1124 01:13:29.591662 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.605059 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dt8lz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58ea3cf6-1f18-428e-9b3f-6064671faf72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63e4ef9d52542d28688b962a86087f6b95a40625eefb13ad0460e2e728f5048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fc2wq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dt8lz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:29Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:29 crc kubenswrapper[4755]: 
I1124 01:13:29.632204 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad
\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d77
3257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d0548d371aaacdf149dc5ab3622d89462cb172cb3686889840db43146fe76f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d0548d371aaacdf149dc5ab3622d89462cb172cb3686889840db43146fe76f4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T01:13:27Z\\\",\\\"message\\\":\\\"ce-id-ver:9d751cbb-f2e2-430d-9754-c882a5e924a5 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:3b 10.217.0.59]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {960d98b2-dc64-4e93-a4b6-9b19847af71e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 01:13:26.996372 6172 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 01:13:26.996031 6172 ovn.go:134] Ensuring zone local for Pod openshift-etcd/etcd-crc in node crc\\\\nF1124 01:13:26.996342 6172 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:26Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-4ngwk_openshift-ovn-kubernetes(b3b1d3cb-ffbd-4034-832d-6577ccf2f780)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4ngwk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:29Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.649730 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"103d5a7c-0ec9-4c03-9f86-03dc3e01665c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40df6848b15d905bccead2547cc1dec836a69e38e80cb65de5dbee812f0081cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89d05f3773b4861d3bb33d7e6a6f09baaedb7510ed0b15703495110923a2f790\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78a49336f91d7c8aeb0d2861d1228d7b8eb478290bdef3b0662e26f9e1cddd57\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T01:13:12Z\\\",\\\"message\\\":\\\"GCM_SHA384' detected.\\\\nW1124 01:13:12.041119 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 01:13:12.041123 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 01:13:12.046149 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046144 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046262 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046271 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1124 01:13:12.046381 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1124 01:13:12.046396 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1124 01:13:12.046536 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\"\\\\nI1124 01:13:12.046556 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1763946791\\\\\\\\\\\\\\\" (2025-11-24 01:13:10 +0000 UTC to 2025-12-24 01:13:11 +0000 UTC (now=2025-11-24 01:13:12.046510889 +0000 UTC))\\\\\\\"\\\\nF1124 01:13:12.046649 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcfcdeaa4c2ae4a6f4bf2d79a64607361b9798549e8910a7374e4e3d17a16ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:29Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.665002 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://311d898b91dec73c482063aca5a79fefbc55b705d41d4da5fbbc21d8972deac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:29Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.680012 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:29Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.689233 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.689283 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.689294 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.689312 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.689326 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:29Z","lastTransitionTime":"2025-11-24T01:13:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.693370 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:29Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.706580 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:29Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.720992 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://519ff5cd20c605ee21f4c1651c932618d593cb5ecf336f3504ebe1bd0f35adbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:29Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.731234 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wl68c\" (UniqueName: \"kubernetes.io/projected/ccb86693-0b66-43ca-a2d1-e9594521d30f-kube-api-access-wl68c\") pod \"network-metrics-daemon-9cl8m\" (UID: \"ccb86693-0b66-43ca-a2d1-e9594521d30f\") " pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.731284 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ccb86693-0b66-43ca-a2d1-e9594521d30f-metrics-certs\") pod \"network-metrics-daemon-9cl8m\" (UID: \"ccb86693-0b66-43ca-a2d1-e9594521d30f\") " pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.734380 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1962128-02a0-46c3-82c2-5055c2aed0b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a249831bc7bdc4396bd5498c176a52332946f791c39b1f90eea4c157e61fc60c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16a2280b76aa63e33f859193da8353df1f8a4014ef51c0ef6350beb5fb217245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63
a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h8xzm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:29Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.748978 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8pm69" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19dbf7ff-f684-4c57-803a-83b39e0705a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://090d3d275453f489254405c1197750888cc4d15d4251c6ce9b8a1a873319d5a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\
\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6k7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8pm69\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:29Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.761920 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vzkz4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a85ee7-3417-491b-a375-99f140cfb5de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67684d7d9617d917019f391c4031994da8f4ff110a4fe3d77dfa1bf76378dec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tts5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vzkz4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:29Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.774199 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7wvfc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a32df1d1-89b4-4a22-a07f-2d7ecd2e265b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f7432e293e8131ca1bbbc7281982a639b41e995fc2210e6351a0e87cf719700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2pjf7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff945c5b4cb1534cb8e31d7e471ebe2f47fe04ea46f09fc53223a72f4c48de5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2pjf7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:27Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7wvfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:29Z is after 2025-08-24T17:21:41Z" Nov 24 
01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.791587 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.791636 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.791645 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.791658 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.791667 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:29Z","lastTransitionTime":"2025-11-24T01:13:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.791667 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4407e6a61e6a16826e5087eda8f3f2904933c9fb540f0a3c03e5414188248657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bcaff046f33f0df74919d9673b7e12e0609ea03f7686c1806ffb3c351965195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"ru
nning\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:29Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.804569 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9cl8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ccb86693-0b66-43ca-a2d1-e9594521d30f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl68c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl68c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:29Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9cl8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:29Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.828849 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b438206-d27d-46e5-a2b5-91397c0d8856\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7575abb359776b9b306d821357a1147cadfd41c99d9528e644a3c1fa046d1df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11946902b549b820749fb6127935c82cc1e840cd84834d1803ffc03003c967d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34675c12ebbb549e0db1190d1d99a23ccef6e4a97b884bcdafdc629204238c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25ec710b1a2a3c639ced06a73586f11fbc05067
1bc059fdcaf49714f153445d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64cdda2f5b59ddeadc36986c875cf28e18a7ed1ce1822dff6002724f7e558215\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:29Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.832350 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wl68c\" (UniqueName: \"kubernetes.io/projected/ccb86693-0b66-43ca-a2d1-e9594521d30f-kube-api-access-wl68c\") pod \"network-metrics-daemon-9cl8m\" (UID: \"ccb86693-0b66-43ca-a2d1-e9594521d30f\") " pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.832388 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ccb86693-0b66-43ca-a2d1-e9594521d30f-metrics-certs\") pod \"network-metrics-daemon-9cl8m\" (UID: \"ccb86693-0b66-43ca-a2d1-e9594521d30f\") " pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:13:29 crc kubenswrapper[4755]: E1124 01:13:29.832579 4755 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 24 01:13:29 crc kubenswrapper[4755]: E1124 01:13:29.832658 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ccb86693-0b66-43ca-a2d1-e9594521d30f-metrics-certs podName:ccb86693-0b66-43ca-a2d1-e9594521d30f nodeName:}" failed. No retries permitted until 2025-11-24 01:13:30.332645754 +0000 UTC m=+35.018711255 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ccb86693-0b66-43ca-a2d1-e9594521d30f-metrics-certs") pod "network-metrics-daemon-9cl8m" (UID: "ccb86693-0b66-43ca-a2d1-e9594521d30f") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.846648 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69352793-3db7-450c-98c2-5fd2040bb5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c1c133fe773aac40181aea15bd49ad2aee2f08caed8f7436c81ae3fc310300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b22922ca0ffb3c9c410a6ed82822ce5b3436d0e44993abe1fcb678959f243808\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5845caccbc89d64a7aaabedeec11a5d75fb7f25eb806cf8dceaf15263f30b4f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a9cb7e9aa093857c03bb4267ff3386e04e4de6e4025fdbfcb628db800d62745\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:29Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.860355 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wl68c\" (UniqueName: \"kubernetes.io/projected/ccb86693-0b66-43ca-a2d1-e9594521d30f-kube-api-access-wl68c\") pod \"network-metrics-daemon-9cl8m\" (UID: \"ccb86693-0b66-43ca-a2d1-e9594521d30f\") " pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.862301 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27983572-2d9c-43d6-a7f0-445a0aec0531\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://612fe8e80230000f941c6c568234f32233a32e831a6d75556d9af68a1b790a7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zb6qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:29Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.893888 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.893928 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:29 crc 
kubenswrapper[4755]: I1124 01:13:29.893936 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.893952 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.893963 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:29Z","lastTransitionTime":"2025-11-24T01:13:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.995656 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:13:29 crc kubenswrapper[4755]: E1124 01:13:29.995854 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.995681 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:13:29 crc kubenswrapper[4755]: E1124 01:13:29.996008 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.996510 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.996546 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.996562 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.996580 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:29 crc kubenswrapper[4755]: I1124 01:13:29.996592 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:29Z","lastTransitionTime":"2025-11-24T01:13:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.098664 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.098706 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.098716 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.098733 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.098744 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:30Z","lastTransitionTime":"2025-11-24T01:13:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.201596 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.201671 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.201682 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.201709 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.201722 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:30Z","lastTransitionTime":"2025-11-24T01:13:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.304237 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.304280 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.304293 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.304309 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.304324 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:30Z","lastTransitionTime":"2025-11-24T01:13:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.336457 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ccb86693-0b66-43ca-a2d1-e9594521d30f-metrics-certs\") pod \"network-metrics-daemon-9cl8m\" (UID: \"ccb86693-0b66-43ca-a2d1-e9594521d30f\") " pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:13:30 crc kubenswrapper[4755]: E1124 01:13:30.336897 4755 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 24 01:13:30 crc kubenswrapper[4755]: E1124 01:13:30.336970 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ccb86693-0b66-43ca-a2d1-e9594521d30f-metrics-certs podName:ccb86693-0b66-43ca-a2d1-e9594521d30f nodeName:}" failed. No retries permitted until 2025-11-24 01:13:31.336954006 +0000 UTC m=+36.023019507 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ccb86693-0b66-43ca-a2d1-e9594521d30f-metrics-certs") pod "network-metrics-daemon-9cl8m" (UID: "ccb86693-0b66-43ca-a2d1-e9594521d30f") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.407200 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.407242 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.407255 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.407279 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.407291 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:30Z","lastTransitionTime":"2025-11-24T01:13:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.510269 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.510365 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.510385 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.510412 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.510429 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:30Z","lastTransitionTime":"2025-11-24T01:13:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.614985 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.615022 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.615030 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.615045 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.615054 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:30Z","lastTransitionTime":"2025-11-24T01:13:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.717320 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.717355 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.717365 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.717378 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.717387 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:30Z","lastTransitionTime":"2025-11-24T01:13:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.820692 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.820732 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.820742 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.820761 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.820772 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:30Z","lastTransitionTime":"2025-11-24T01:13:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.924324 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.924386 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.924407 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.924429 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.924444 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:30Z","lastTransitionTime":"2025-11-24T01:13:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.996262 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:13:30 crc kubenswrapper[4755]: E1124 01:13:30.996425 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:13:30 crc kubenswrapper[4755]: I1124 01:13:30.996268 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:13:30 crc kubenswrapper[4755]: E1124 01:13:30.996698 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.026944 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.027012 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.027021 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.027035 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.027045 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:31Z","lastTransitionTime":"2025-11-24T01:13:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.129643 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.129723 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.129735 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.129749 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.129777 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:31Z","lastTransitionTime":"2025-11-24T01:13:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.232262 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.232301 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.232311 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.232325 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.232335 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:31Z","lastTransitionTime":"2025-11-24T01:13:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.335936 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.336023 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.336039 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.336061 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.336075 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:31Z","lastTransitionTime":"2025-11-24T01:13:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.348030 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ccb86693-0b66-43ca-a2d1-e9594521d30f-metrics-certs\") pod \"network-metrics-daemon-9cl8m\" (UID: \"ccb86693-0b66-43ca-a2d1-e9594521d30f\") " pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:13:31 crc kubenswrapper[4755]: E1124 01:13:31.348272 4755 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 24 01:13:31 crc kubenswrapper[4755]: E1124 01:13:31.348418 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ccb86693-0b66-43ca-a2d1-e9594521d30f-metrics-certs podName:ccb86693-0b66-43ca-a2d1-e9594521d30f nodeName:}" failed. No retries permitted until 2025-11-24 01:13:33.348383842 +0000 UTC m=+38.034449393 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ccb86693-0b66-43ca-a2d1-e9594521d30f-metrics-certs") pod "network-metrics-daemon-9cl8m" (UID: "ccb86693-0b66-43ca-a2d1-e9594521d30f") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.439313 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.439351 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.439361 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.439374 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.439384 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:31Z","lastTransitionTime":"2025-11-24T01:13:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.542066 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.542139 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.542248 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.542281 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.542298 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:31Z","lastTransitionTime":"2025-11-24T01:13:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.645479 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.645518 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.645529 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.645547 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.645557 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:31Z","lastTransitionTime":"2025-11-24T01:13:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.651241 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.651379 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:13:31 crc kubenswrapper[4755]: E1124 01:13:31.651543 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 01:13:31 crc kubenswrapper[4755]: E1124 01:13:31.651568 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 24 01:13:31 crc kubenswrapper[4755]: E1124 01:13:31.651579 4755 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 01:13:31 crc kubenswrapper[4755]: E1124 01:13:31.651626 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 01:13:31 crc kubenswrapper[4755]: E1124 01:13:31.651662 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 24 01:13:31 crc kubenswrapper[4755]: E1124 01:13:31.651681 4755 projected.go:194] Error preparing data for projected volume 
kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 01:13:31 crc kubenswrapper[4755]: E1124 01:13:31.651644 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-24 01:13:47.651627817 +0000 UTC m=+52.337693318 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 01:13:31 crc kubenswrapper[4755]: E1124 01:13:31.651764 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-24 01:13:47.65174158 +0000 UTC m=+52.337807111 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.748711 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.749059 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.749154 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.749248 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.749350 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:31Z","lastTransitionTime":"2025-11-24T01:13:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.752167 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.752291 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.752328 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:13:31 crc kubenswrapper[4755]: E1124 01:13:31.752450 4755 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 01:13:31 crc kubenswrapper[4755]: E1124 01:13:31.752497 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-24 01:13:47.752481795 +0000 UTC m=+52.438547306 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 01:13:31 crc kubenswrapper[4755]: E1124 01:13:31.752564 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:13:47.752554567 +0000 UTC m=+52.438620088 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:13:31 crc kubenswrapper[4755]: E1124 01:13:31.752623 4755 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 01:13:31 crc kubenswrapper[4755]: E1124 01:13:31.752657 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-24 01:13:47.75264893 +0000 UTC m=+52.438714441 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.852094 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.852183 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.852203 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.852229 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.852249 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:31Z","lastTransitionTime":"2025-11-24T01:13:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.954785 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.954857 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.954885 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.954919 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.954941 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:31Z","lastTransitionTime":"2025-11-24T01:13:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.996461 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:13:31 crc kubenswrapper[4755]: I1124 01:13:31.996679 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:13:31 crc kubenswrapper[4755]: E1124 01:13:31.997081 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:13:31 crc kubenswrapper[4755]: E1124 01:13:31.997283 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.059246 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.059344 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.059368 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.059392 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.059410 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:32Z","lastTransitionTime":"2025-11-24T01:13:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.161341 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.161646 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.161801 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.161937 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.162052 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:32Z","lastTransitionTime":"2025-11-24T01:13:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.265064 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.265348 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.265441 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.265548 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.265661 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:32Z","lastTransitionTime":"2025-11-24T01:13:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.368975 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.369054 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.369073 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.369100 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.369118 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:32Z","lastTransitionTime":"2025-11-24T01:13:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.472547 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.472633 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.472648 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.472670 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.472686 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:32Z","lastTransitionTime":"2025-11-24T01:13:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.574985 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.575071 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.575126 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.575152 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.575170 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:32Z","lastTransitionTime":"2025-11-24T01:13:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.677748 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.677800 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.677814 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.677834 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.677850 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:32Z","lastTransitionTime":"2025-11-24T01:13:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.779761 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.779801 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.779812 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.779828 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.779839 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:32Z","lastTransitionTime":"2025-11-24T01:13:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.882682 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.882798 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.882884 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.883347 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.883774 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:32Z","lastTransitionTime":"2025-11-24T01:13:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.986440 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.986503 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.986523 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.986551 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.986574 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:32Z","lastTransitionTime":"2025-11-24T01:13:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.995798 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:13:32 crc kubenswrapper[4755]: I1124 01:13:32.995852 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:13:32 crc kubenswrapper[4755]: E1124 01:13:32.995972 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:13:32 crc kubenswrapper[4755]: E1124 01:13:32.996051 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.089344 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.089408 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.089425 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.089448 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.089466 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:33Z","lastTransitionTime":"2025-11-24T01:13:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.192584 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.192977 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.193077 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.193170 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.193251 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:33Z","lastTransitionTime":"2025-11-24T01:13:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.296422 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.296496 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.296520 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.296543 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.296561 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:33Z","lastTransitionTime":"2025-11-24T01:13:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.368813 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ccb86693-0b66-43ca-a2d1-e9594521d30f-metrics-certs\") pod \"network-metrics-daemon-9cl8m\" (UID: \"ccb86693-0b66-43ca-a2d1-e9594521d30f\") " pod="openshift-multus/network-metrics-daemon-9cl8m"
Nov 24 01:13:33 crc kubenswrapper[4755]: E1124 01:13:33.369396 4755 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Nov 24 01:13:33 crc kubenswrapper[4755]: E1124 01:13:33.369517 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ccb86693-0b66-43ca-a2d1-e9594521d30f-metrics-certs podName:ccb86693-0b66-43ca-a2d1-e9594521d30f nodeName:}" failed. No retries permitted until 2025-11-24 01:13:37.369485392 +0000 UTC m=+42.055550933 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ccb86693-0b66-43ca-a2d1-e9594521d30f-metrics-certs") pod "network-metrics-daemon-9cl8m" (UID: "ccb86693-0b66-43ca-a2d1-e9594521d30f") : object "openshift-multus"/"metrics-daemon-secret" not registered
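The nestedpendingoperations record above declines to retry the failed metrics-certs mount until 01:13:37, with durationBeforeRetry 4s: the kubelet's volume manager backs off exponentially between consecutive failures of the same operation. A small sketch of that backoff shape follows; the initial delay, factor and cap are assumed values for illustration and are not read from this log, which only shows the 4s delay.

    # Sketch of the exponential backoff behind "durationBeforeRetry": after each
    # consecutive failure of the same volume operation, the wait roughly doubles
    # until a cap is reached. Constants here are illustrative assumptions.
    from dataclasses import dataclass
    from datetime import datetime, timedelta

    @dataclass
    class ExponentialBackoff:
        initial: timedelta = timedelta(milliseconds=500)  # assumed, not from this log
        factor: float = 2.0                               # assumed, not from this log
        cap: timedelta = timedelta(minutes=2)             # assumed, not from this log
        duration: timedelta = timedelta(0)
        last_error_time: datetime | None = None

        def record_failure(self, now: datetime) -> datetime:
            """Grow the delay after a failed attempt and return the earliest
            time a retry is permitted ("No retries permitted until ...")."""
            if self.duration == timedelta(0):
                self.duration = self.initial
            else:
                self.duration = min(self.duration * self.factor, self.cap)
            self.last_error_time = now
            return now + self.duration

        def retry_allowed(self, now: datetime) -> bool:
            """True once durationBeforeRetry has elapsed since the last failure."""
            if self.last_error_time is None:
                return True
            return now >= self.last_error_time + self.duration

With those assumed constants, a fourth consecutive failure would yield a 4s delay, consistent with the record above, but the actual constants live in the kubelet's own backoff code. The underlying cause here is the secret.go error: the openshift-multus/metrics-daemon-secret object is not yet registered with the kubelet, so the mount keeps failing and backing off.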
Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.398402 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.398430 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.398438 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.398452 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.398461 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:33Z","lastTransitionTime":"2025-11-24T01:13:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.501480 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.501530 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.501547 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.501569 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.501587 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:33Z","lastTransitionTime":"2025-11-24T01:13:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.605329 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.605484 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.605506 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.605534 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.605554 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:33Z","lastTransitionTime":"2025-11-24T01:13:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.708183 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.708240 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.708258 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.708285 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.708304 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:33Z","lastTransitionTime":"2025-11-24T01:13:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.811416 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.811489 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.811511 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.811539 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.811560 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:33Z","lastTransitionTime":"2025-11-24T01:13:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.914495 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.914562 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.914586 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.914713 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.914769 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:33Z","lastTransitionTime":"2025-11-24T01:13:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.996469 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 24 01:13:33 crc kubenswrapper[4755]: I1124 01:13:33.996536 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 24 01:13:33 crc kubenswrapper[4755]: E1124 01:13:33.996797 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 24 01:13:33 crc kubenswrapper[4755]: E1124 01:13:33.996989 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
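Each record above carries the klog header (severity and date, timestamp, PID, source file and line) followed by a structured message, which makes the repetition easy to quantify mechanically. Below is a minimal sketch that tallies the "Error syncing pod, skipping" records per pod, assuming a local plain-text copy of this log at the hypothetical path kubelet.log; the regular expression is derived from the record format visible here, not from any kubelet API.

    # Sketch: count "Error syncing pod, skipping" records per pod in a saved
    # copy of this log. Path and regex are assumptions for illustration.
    import re
    from collections import Counter

    LOG_PATH = "kubelet.log"  # hypothetical local copy of this log
    SYNC_ERROR = re.compile(r'"Error syncing pod, skipping".*?pod="(?P<pod>[^"]+)"')

    def count_sync_errors(path: str = LOG_PATH) -> Counter:
        counts: Counter = Counter()
        with open(path, encoding="utf-8", errors="replace") as handle:
            for line in handle:
                match = SYNC_ERROR.search(line)
                if match:
                    counts[match.group("pod")] += 1
        return counts

    if __name__ == "__main__":
        for pod, count in count_sync_errors().most_common():
            print(f"{count:5d}  {pod}")

A tally like this makes it clear that the same handful of pods (network-metrics-daemon, networking-console-plugin, network-check-target, network-check-source) are being skipped repeatedly for the single CNI-not-ready reason, rather than failing for independent causes.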
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.017506 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.017546 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.017556 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.017572 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.017583 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:34Z","lastTransitionTime":"2025-11-24T01:13:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.120926 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.120988 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.121001 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.121021 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.121032 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:34Z","lastTransitionTime":"2025-11-24T01:13:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.223951 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.224013 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.224034 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.224062 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.224085 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:34Z","lastTransitionTime":"2025-11-24T01:13:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.326723 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.326758 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.326769 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.326784 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.326795 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:34Z","lastTransitionTime":"2025-11-24T01:13:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.428837 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.428897 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.428909 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.428929 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.428942 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:34Z","lastTransitionTime":"2025-11-24T01:13:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.532788 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.532836 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.532847 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.532864 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.532877 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:34Z","lastTransitionTime":"2025-11-24T01:13:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.637012 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.637078 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.637095 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.637120 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.637138 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:34Z","lastTransitionTime":"2025-11-24T01:13:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.740697 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.740755 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.740767 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.740786 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.740798 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:34Z","lastTransitionTime":"2025-11-24T01:13:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.843587 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.843707 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.843725 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.843751 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.843768 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:34Z","lastTransitionTime":"2025-11-24T01:13:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.947167 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.947226 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.947242 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.947267 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.947284 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:34Z","lastTransitionTime":"2025-11-24T01:13:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.995783 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.995852 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:13:34 crc kubenswrapper[4755]: E1124 01:13:34.995990 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:13:34 crc kubenswrapper[4755]: E1124 01:13:34.996105 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:13:34 crc kubenswrapper[4755]: I1124 01:13:34.997042 4755 scope.go:117] "RemoveContainer" containerID="464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.052507 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.052892 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.053055 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.053233 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.053394 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:35Z","lastTransitionTime":"2025-11-24T01:13:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.157114 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.157150 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.157160 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.157175 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.157184 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:35Z","lastTransitionTime":"2025-11-24T01:13:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.255284 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.259169 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"5cbe55a8cc684f932438f20f513d01400266c1c6ee3ded9c9ae1ea0b92aec7af"} Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.259977 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.260070 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.260082 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.260089 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.260099 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.260109 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:35Z","lastTransitionTime":"2025-11-24T01:13:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.277111 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:35Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.293358 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:35Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.305020 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8pm69" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19dbf7ff-f684-4c57-803a-83b39e0705a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://090d3d275453f489254405c1197750888cc4d15d4251c6ce9b8a1a873319d5a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6k7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8pm69\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:35Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.313769 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vzkz4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a85ee7-3417-491b-a375-99f140cfb5de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67684d7d9617d917019f391c4031994da8f4ff110a4fe3d77dfa1bf76378dec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tts5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.
126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vzkz4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:35Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.325101 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7wvfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a32df1d1-89b4-4a22-a07f-2d7ecd2e265b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f7432e293e8131ca1bbbc7281982a639b41e995fc2210e6351a0e87cf719700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2pjf7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff945c5b4cb1534cb8e31d7e471ebe2f47fe04ea46f09fc53223a72f4c48de5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\
\"name\\\":\\\"kube-api-access-2pjf7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:27Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7wvfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:35Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.338263 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4407e6a61e6a16826e5087eda8f3f2904933c9fb540f0a3c03e5414188248657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bcaff046f33f0df74919d9673b7e12e0609ea03f7686c1806ffb3c351965195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\
\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:35Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.353596 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://519ff5cd20c605ee21f4c1651c932618d593cb5ecf336f3504ebe1bd0f35adbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:35Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.362540 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.362583 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.362592 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.362620 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.362633 4755 
setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:35Z","lastTransitionTime":"2025-11-24T01:13:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.367501 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1962128-02a0-46c3-82c2-5055c2aed0b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a249831bc7bdc4396bd5498c176a52332946f791c39b1f90eea4c157e61fc60c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16a2280b76aa63e33f859193da8353df1f8a4014ef51c0ef6350beb5fb217245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":
\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h8xzm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:35Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.385953 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b438206-d27d-46e5-a2b5-91397c0d8856\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7575abb359776b9b306d821357a1147cadfd41c99d9528e644a3c1fa046d1df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11946902b549b820749fb6127935c82cc1e840cd84834d1803ffc03003c967d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34675c12ebbb549e0db1190d1d99a23ccef6e4a97b884bcdafdc629204238c0\\\",\\\"im
age\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25ec710b1a2a3c639ced06a73586f11fbc050671bc059fdcaf49714f153445d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64cdda2f5b59ddeadc36986c875cf28e18a7ed1ce1822dff6002724f7e558215\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-de
v@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:35Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.402684 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"69352793-3db7-450c-98c2-5fd2040bb5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c1c133fe773aac40181aea15bd49ad2aee2f08caed8f7436c81ae3fc310300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b22922ca0ffb3c9c410a6ed82822ce5b3436d0e44993abe1fcb678959f243808\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5845caccbc89d64a7aaabedeec11a5d75fb7f25eb806cf8dceaf15263f30b4f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a9cb7e9aa093857c03bb4267ff3386e04e4de6e4025fdbfcb628db800d62745\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:35Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.416821 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27983572-2d9c-43d6-a7f0-445a0aec0531\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://612fe8e80230000f941c6c568234f32233a32e831a6d75556d9af68a1b790a7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e878
2bb4061601c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-b
inary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termin
ated\\\":{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zb6qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:35Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.429681 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9cl8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ccb86693-0b66-43ca-a2d1-e9594521d30f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl68c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl68c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:29Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9cl8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:35Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.445830 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"103d5a7c-0ec9-4c03-9f86-03dc3e01665c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40df6848b15d905bccead2547cc1dec836a69e38e80cb65de5dbee812f0081cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89d05f3773b4861d3bb33d7e6a6f09baaedb7510ed0b15703495110923a2f790\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78a49336f91d7c8aeb0d2861d1228d7b8eb478290bdef3b0662e26f9e1cddd57\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cbe55a8cc684f932438f20f513d01400266c1c6ee3ded9c9ae1ea0b92aec7af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T01:13:12Z\\\",\\\"message\\\":\\\"GCM_SHA384' detected.\\\\nW1124 01:13:12.041119 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 01:13:12.041123 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 01:13:12.046149 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046144 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046262 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046271 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1124 01:13:12.046381 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1124 01:13:12.046396 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1124 01:13:12.046536 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\"\\\\nI1124 01:13:12.046556 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1763946791\\\\\\\\\\\\\\\" (2025-11-24 01:13:10 +0000 UTC to 2025-12-24 01:13:11 +0000 UTC (now=2025-11-24 01:13:12.046510889 +0000 UTC))\\\\\\\"\\\\nF1124 01:13:12.046649 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcfcdeaa4c2ae4a6f4bf2d79a64607361b9798549e8910a7374e4e3d17a16ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:35Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.463562 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://311d898b91dec73c482063aca5a79fefbc55b705d41d4da5fbbc21d8972deac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:35Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.465001 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.465123 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.465192 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.465258 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.465350 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:35Z","lastTransitionTime":"2025-11-24T01:13:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.476325 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:35Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.483840 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.483870 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.483878 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.483892 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.483902 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:35Z","lastTransitionTime":"2025-11-24T01:13:35Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.485541 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dt8lz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58ea3cf6-1f18-428e-9b3f-6064671faf72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63e4ef9d52542d28688b962a86087f6b95a40625eefb13ad0460e2e728f5048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fc2wq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dt8lz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:35Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:35 crc kubenswrapper[4755]: E1124 01:13:35.505172 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"50bb41a3-bb20-461c-ba4c-72998ece87bc\\\",\\\"systemUUID\\\":\\\"cb6dbfa2-ee9a-4406-af65-1558b4c6cb25\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:35Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.508547 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.508582 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.508593 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.508622 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.508634 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:35Z","lastTransitionTime":"2025-11-24T01:13:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.510324 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d0548d371aaacdf149dc5ab3622d89462cb172cb3686889840db43146fe76f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d0548d371aaacdf149dc5ab3622d89462cb172cb3686889840db43146fe76f4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T01:13:27Z\\\",\\\"message\\\":\\\"ce-id-ver:9d751cbb-f2e2-430d-9754-c882a5e924a5 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:3b 10.217.0.59]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {960d98b2-dc64-4e93-a4b6-9b19847af71e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 01:13:26.996372 6172 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 01:13:26.996031 6172 ovn.go:134] Ensuring zone local for Pod openshift-etcd/etcd-crc in node crc\\\\nF1124 01:13:26.996342 6172 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error 
occurred\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:26Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-4ngwk_openshift-ovn-kubernetes(b3b1d3cb-ffbd-4034-832d-6577ccf2f780)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4ngwk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:35Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:35 crc kubenswrapper[4755]: E1124 01:13:35.522253 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeByt
es\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"50bb41a3-bb20-461c-ba4c-72998ece87bc\\\",\\\"systemUUID\\\":\\\"cb6dbfa2-ee9a-4406-af65-1558b4c6cb25\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:35Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.525798 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.525835 4755 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.525845 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.525860 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.525871 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:35Z","lastTransitionTime":"2025-11-24T01:13:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:35 crc kubenswrapper[4755]: E1124 01:13:35.542335 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"50bb41a3-bb20-461c-ba4c-72998ece87bc\\\",\\\"systemUUID\\\":\\\"cb6dbfa2-ee9a-4406-af65-1558b4c6cb25\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:35Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.546167 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.546204 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.546216 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.546234 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.546246 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:35Z","lastTransitionTime":"2025-11-24T01:13:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:35 crc kubenswrapper[4755]: E1124 01:13:35.557358 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"50bb41a3-bb20-461c-ba4c-72998ece87bc\\\",\\\"systemUUID\\\":\\\"cb6dbfa2-ee9a-4406-af65-1558b4c6cb25\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:35Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.561243 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.561271 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.561280 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.561295 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.561307 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:35Z","lastTransitionTime":"2025-11-24T01:13:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:35 crc kubenswrapper[4755]: E1124 01:13:35.574939 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"50bb41a3-bb20-461c-ba4c-72998ece87bc\\\",\\\"systemUUID\\\":\\\"cb6dbfa2-ee9a-4406-af65-1558b4c6cb25\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:35Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:35 crc kubenswrapper[4755]: E1124 01:13:35.575109 4755 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.576809 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.576833 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.576842 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.576857 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.576868 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:35Z","lastTransitionTime":"2025-11-24T01:13:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.679475 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.679512 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.679524 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.679541 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.679552 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:35Z","lastTransitionTime":"2025-11-24T01:13:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.782244 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.782289 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.782306 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.782330 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.782349 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:35Z","lastTransitionTime":"2025-11-24T01:13:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.885509 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.885580 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.885636 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.885672 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.885696 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:35Z","lastTransitionTime":"2025-11-24T01:13:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.989355 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.989425 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.989443 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.989468 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.989490 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:35Z","lastTransitionTime":"2025-11-24T01:13:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.996633 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:13:35 crc kubenswrapper[4755]: I1124 01:13:35.996746 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:13:35 crc kubenswrapper[4755]: E1124 01:13:35.996906 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:13:35 crc kubenswrapper[4755]: E1124 01:13:35.997034 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.035270 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b438206-d27d-46e5-a2b5-91397c0d8856\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7575abb359776b9b306d821357a1147cadfd41c99d9528e644a3c1fa046d1df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11946902b549b820749fb6127935c82cc1e840cd84834d1803ffc03003c967d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34675c12ebbb549e0db1190d1d99a23ccef
6e4a97b884bcdafdc629204238c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25ec710b1a2a3c639ced06a73586f11fbc050671bc059fdcaf49714f153445d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64cdda2f5b59ddeadc36986c875cf28e18a7ed1ce1822dff6002724f7e558215\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"image\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:36Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.051281 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"69352793-3db7-450c-98c2-5fd2040bb5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c1c133fe773aac40181aea15bd49ad2aee2f08caed8f7436c81ae3fc310300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b22922ca0ffb3c9c410a6ed82822ce5b3436d0e44993abe1fcb678959f243808\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5845caccbc89d64a7aaabedeec11a5d75fb7f25eb806cf8dceaf15263f30b4f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a9cb7e9aa093857c03bb4267ff3386e04e4de6e4025fdbfcb628db800d62745\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:36Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.069563 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27983572-2d9c-43d6-a7f0-445a0aec0531\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://612fe8e80230000f941c6c568234f32233a32e831a6d75556d9af68a1b790a7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e878
2bb4061601c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-b
inary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termin
ated\\\":{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zb6qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:36Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.081907 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9cl8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ccb86693-0b66-43ca-a2d1-e9594521d30f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl68c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl68c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:29Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9cl8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:36Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.092051 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.092113 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.092130 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.092154 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.092171 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:36Z","lastTransitionTime":"2025-11-24T01:13:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.097882 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"103d5a7c-0ec9-4c03-9f86-03dc3e01665c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40df6848b15d905bccead2547cc1dec836a69e38e80cb65de5dbee812f0081cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89d05f3773b4861d3bb33d7e6a6f09baaedb7510ed0b15703495110923a2f790\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78a49336f91d7c8aeb0d2861d1228d7b8eb478290bdef3b0662e26f9e1cddd57\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cbe55a8cc684f932438f20f513d01400266c1c6ee3ded9c9ae1ea0b92aec7af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T01:13:12Z\\\",\\\"message\\\":\\\"GCM_SHA384' detected.\\\\nW1124 01:13:12.041119 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 01:13:12.041123 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 01:13:12.046149 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046144 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046262 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046271 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1124 01:13:12.046381 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1124 01:13:12.046396 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1124 01:13:12.046536 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\"\\\\nI1124 01:13:12.046556 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1763946791\\\\\\\\\\\\\\\" (2025-11-24 01:13:10 +0000 UTC to 2025-12-24 01:13:11 +0000 UTC (now=2025-11-24 01:13:12.046510889 +0000 UTC))\\\\\\\"\\\\nF1124 01:13:12.046649 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcfcdeaa4c2ae4a6f4bf2d79a64607361b9798549e8910a7374e4e3d17a16ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:36Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.118539 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://311d898b91dec73c482063aca5a79fefbc55b705d41d4da5fbbc21d8972deac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:36Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.134541 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:36Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.147537 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dt8lz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58ea3cf6-1f18-428e-9b3f-6064671faf72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63e4ef9d52542d28688b962a86087f6b95a40625eefb13ad0460e2e728f5048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fc2wq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dt8lz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:36Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.174126 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d0548d371aaacdf149dc5ab3622d89462cb172cb3686889840db43146fe76f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d0548d371aaacdf149dc5ab3622d89462cb172cb3686889840db43146fe76f4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T01:13:27Z\\\",\\\"message\\\":\\\"ce-id-ver:9d751cbb-f2e2-430d-9754-c882a5e924a5 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:3b 10.217.0.59]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {960d98b2-dc64-4e93-a4b6-9b19847af71e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 01:13:26.996372 6172 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 01:13:26.996031 6172 ovn.go:134] Ensuring zone local for Pod openshift-etcd/etcd-crc in node crc\\\\nF1124 01:13:26.996342 6172 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error 
occurred\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:26Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-4ngwk_openshift-ovn-kubernetes(b3b1d3cb-ffbd-4034-832d-6577ccf2f780)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4ngwk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:36Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.187250 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:36Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.194062 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.194109 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.194130 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.194153 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.194171 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:36Z","lastTransitionTime":"2025-11-24T01:13:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.201031 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:36Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.217564 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7wvfc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a32df1d1-89b4-4a22-a07f-2d7ecd2e265b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f7432e293e8131ca1bbbc7281982a639b41e995fc2210e6351a0e87cf719700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2pjf7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff945c5b4cb1534cb8e31d7e471ebe2f47fe04ea46f09fc53223a72f4c48de5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2pjf7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:27Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7wvfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:36Z is after 2025-08-24T17:21:41Z" Nov 24 
01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.236263 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4407e6a61e6a16826e5087eda8f3f2904933c9fb540f0a3c03e5414188248657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bcaff046f33f0df74919d9673b7e12e0609ea03f7686c1806ffb3c351965195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:36Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.246493 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://519ff5cd20c605ee21f4c1651c932618d593cb5ecf336f3504ebe1bd0f35adbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:36Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.256299 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1962128-02a0-46c3-82c2-5055c2aed0b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a249831bc7bdc4396bd5498c176a52332946f791c39b1f90eea4c157e61fc60c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16a2280b76aa63e33f859193da8353df1f8a4014ef51c0ef6350beb5fb217245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h8xzm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:36Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.269914 4755 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-8pm69" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19dbf7ff-f684-4c57-803a-83b39e0705a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://090d3d275453f489254405c1197750888cc4d15d4251c6ce9b8a1a873319d5a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6k7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-8pm69\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:36Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.281857 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vzkz4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a85ee7-3417-491b-a375-99f140cfb5de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67684d7d9617d917019f391c4031994da8f4ff110a4fe3d77dfa1bf76378dec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tts5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vzkz4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:36Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.296264 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.296465 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.296713 4755 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.296776 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.296794 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:36Z","lastTransitionTime":"2025-11-24T01:13:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.399913 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.399949 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.399958 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.399971 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.399982 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:36Z","lastTransitionTime":"2025-11-24T01:13:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.502542 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.502567 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.502575 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.502586 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.502596 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:36Z","lastTransitionTime":"2025-11-24T01:13:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.609840 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.609927 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.609954 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.609989 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.610024 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:36Z","lastTransitionTime":"2025-11-24T01:13:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.713292 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.713339 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.713357 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.713376 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.713392 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:36Z","lastTransitionTime":"2025-11-24T01:13:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.816591 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.816682 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.816699 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.816722 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.816743 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:36Z","lastTransitionTime":"2025-11-24T01:13:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.920063 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.920124 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.920142 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.920166 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.920184 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:36Z","lastTransitionTime":"2025-11-24T01:13:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.996175 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:13:36 crc kubenswrapper[4755]: I1124 01:13:36.996222 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:13:36 crc kubenswrapper[4755]: E1124 01:13:36.996351 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:13:36 crc kubenswrapper[4755]: E1124 01:13:36.996479 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.023039 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.023089 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.023106 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.023128 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.023148 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:37Z","lastTransitionTime":"2025-11-24T01:13:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.126412 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.126835 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.126984 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.127124 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.127254 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:37Z","lastTransitionTime":"2025-11-24T01:13:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.230517 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.230573 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.230591 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.230646 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.230695 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:37Z","lastTransitionTime":"2025-11-24T01:13:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.334100 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.334181 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.334209 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.334242 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.334265 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:37Z","lastTransitionTime":"2025-11-24T01:13:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.413807 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ccb86693-0b66-43ca-a2d1-e9594521d30f-metrics-certs\") pod \"network-metrics-daemon-9cl8m\" (UID: \"ccb86693-0b66-43ca-a2d1-e9594521d30f\") " pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:13:37 crc kubenswrapper[4755]: E1124 01:13:37.413981 4755 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 24 01:13:37 crc kubenswrapper[4755]: E1124 01:13:37.414076 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ccb86693-0b66-43ca-a2d1-e9594521d30f-metrics-certs podName:ccb86693-0b66-43ca-a2d1-e9594521d30f nodeName:}" failed. No retries permitted until 2025-11-24 01:13:45.414054293 +0000 UTC m=+50.100119804 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ccb86693-0b66-43ca-a2d1-e9594521d30f-metrics-certs") pod "network-metrics-daemon-9cl8m" (UID: "ccb86693-0b66-43ca-a2d1-e9594521d30f") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.436847 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.436948 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.436961 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.436979 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.436992 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:37Z","lastTransitionTime":"2025-11-24T01:13:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.539078 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.539118 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.539134 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.539157 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.539170 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:37Z","lastTransitionTime":"2025-11-24T01:13:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.642769 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.642860 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.642882 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.642911 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.642928 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:37Z","lastTransitionTime":"2025-11-24T01:13:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.746026 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.746099 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.746117 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.746142 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.746160 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:37Z","lastTransitionTime":"2025-11-24T01:13:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.849913 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.849996 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.850020 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.850049 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.850070 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:37Z","lastTransitionTime":"2025-11-24T01:13:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.953001 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.953087 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.953111 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.953143 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.953162 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:37Z","lastTransitionTime":"2025-11-24T01:13:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.995657 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:13:37 crc kubenswrapper[4755]: I1124 01:13:37.995782 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:13:37 crc kubenswrapper[4755]: E1124 01:13:37.995938 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:13:37 crc kubenswrapper[4755]: E1124 01:13:37.996070 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:13:38 crc kubenswrapper[4755]: I1124 01:13:38.056373 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:38 crc kubenswrapper[4755]: I1124 01:13:38.056412 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:38 crc kubenswrapper[4755]: I1124 01:13:38.056423 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:38 crc kubenswrapper[4755]: I1124 01:13:38.056439 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:38 crc kubenswrapper[4755]: I1124 01:13:38.056451 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:38Z","lastTransitionTime":"2025-11-24T01:13:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:38 crc kubenswrapper[4755]: I1124 01:13:38.159489 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:38 crc kubenswrapper[4755]: I1124 01:13:38.159521 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:38 crc kubenswrapper[4755]: I1124 01:13:38.159529 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:38 crc kubenswrapper[4755]: I1124 01:13:38.159542 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:38 crc kubenswrapper[4755]: I1124 01:13:38.159552 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:38Z","lastTransitionTime":"2025-11-24T01:13:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:38 crc kubenswrapper[4755]: I1124 01:13:38.262078 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:38 crc kubenswrapper[4755]: I1124 01:13:38.262123 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:38 crc kubenswrapper[4755]: I1124 01:13:38.262135 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:38 crc kubenswrapper[4755]: I1124 01:13:38.262153 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:38 crc kubenswrapper[4755]: I1124 01:13:38.262170 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:38Z","lastTransitionTime":"2025-11-24T01:13:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:38 crc kubenswrapper[4755]: I1124 01:13:38.365134 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:38 crc kubenswrapper[4755]: I1124 01:13:38.365189 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:38 crc kubenswrapper[4755]: I1124 01:13:38.365206 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:38 crc kubenswrapper[4755]: I1124 01:13:38.365231 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:38 crc kubenswrapper[4755]: I1124 01:13:38.365251 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:38Z","lastTransitionTime":"2025-11-24T01:13:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:38 crc kubenswrapper[4755]: I1124 01:13:38.467950 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:38 crc kubenswrapper[4755]: I1124 01:13:38.467997 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:38 crc kubenswrapper[4755]: I1124 01:13:38.468009 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:38 crc kubenswrapper[4755]: I1124 01:13:38.468025 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:38 crc kubenswrapper[4755]: I1124 01:13:38.468037 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:38Z","lastTransitionTime":"2025-11-24T01:13:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:38 crc kubenswrapper[4755]: I1124 01:13:38.571016 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:38 crc kubenswrapper[4755]: I1124 01:13:38.571075 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:38 crc kubenswrapper[4755]: I1124 01:13:38.571087 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:38 crc kubenswrapper[4755]: I1124 01:13:38.571106 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:38 crc kubenswrapper[4755]: I1124 01:13:38.571120 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:38Z","lastTransitionTime":"2025-11-24T01:13:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:38 crc kubenswrapper[4755]: I1124 01:13:38.699403 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:38 crc kubenswrapper[4755]: I1124 01:13:38.699460 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:38 crc kubenswrapper[4755]: I1124 01:13:38.699478 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:38 crc kubenswrapper[4755]: I1124 01:13:38.699504 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:38 crc kubenswrapper[4755]: I1124 01:13:38.699527 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:38Z","lastTransitionTime":"2025-11-24T01:13:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:38 crc kubenswrapper[4755]: I1124 01:13:38.801884 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:38 crc kubenswrapper[4755]: I1124 01:13:38.801954 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:38 crc kubenswrapper[4755]: I1124 01:13:38.801973 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:38 crc kubenswrapper[4755]: I1124 01:13:38.801992 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:38 crc kubenswrapper[4755]: I1124 01:13:38.802004 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:38Z","lastTransitionTime":"2025-11-24T01:13:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:38 crc kubenswrapper[4755]: I1124 01:13:38.904509 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:38 crc kubenswrapper[4755]: I1124 01:13:38.904558 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:38 crc kubenswrapper[4755]: I1124 01:13:38.904568 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:38 crc kubenswrapper[4755]: I1124 01:13:38.904584 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:38 crc kubenswrapper[4755]: I1124 01:13:38.904595 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:38Z","lastTransitionTime":"2025-11-24T01:13:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:38 crc kubenswrapper[4755]: I1124 01:13:38.996482 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:13:38 crc kubenswrapper[4755]: I1124 01:13:38.996495 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:13:38 crc kubenswrapper[4755]: E1124 01:13:38.996652 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:13:38 crc kubenswrapper[4755]: E1124 01:13:38.996756 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.007697 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.007760 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.007784 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.007814 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.007835 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:39Z","lastTransitionTime":"2025-11-24T01:13:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.110593 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.110657 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.110668 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.110686 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.110700 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:39Z","lastTransitionTime":"2025-11-24T01:13:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.213317 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.213388 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.213404 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.213429 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.213448 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:39Z","lastTransitionTime":"2025-11-24T01:13:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.315805 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.315871 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.315886 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.315907 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.315921 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:39Z","lastTransitionTime":"2025-11-24T01:13:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.418669 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.418861 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.418874 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.418887 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.418896 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:39Z","lastTransitionTime":"2025-11-24T01:13:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.522198 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.522256 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.522280 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.522309 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.522327 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:39Z","lastTransitionTime":"2025-11-24T01:13:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.625097 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.625162 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.625175 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.625191 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.625203 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:39Z","lastTransitionTime":"2025-11-24T01:13:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.727995 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.728360 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.728511 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.728752 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.728885 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:39Z","lastTransitionTime":"2025-11-24T01:13:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.832185 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.832218 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.832226 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.832241 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.832253 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:39Z","lastTransitionTime":"2025-11-24T01:13:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.934583 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.934704 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.934743 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.934879 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.934970 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:39Z","lastTransitionTime":"2025-11-24T01:13:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.996056 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:13:39 crc kubenswrapper[4755]: E1124 01:13:39.996265 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:13:39 crc kubenswrapper[4755]: I1124 01:13:39.996987 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:13:39 crc kubenswrapper[4755]: E1124 01:13:39.997136 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.039567 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.039683 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.039703 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.039728 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.039745 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:40Z","lastTransitionTime":"2025-11-24T01:13:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.142267 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.142339 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.142361 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.142392 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.142415 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:40Z","lastTransitionTime":"2025-11-24T01:13:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.246054 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.246123 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.246148 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.246178 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.246202 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:40Z","lastTransitionTime":"2025-11-24T01:13:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.349413 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.349465 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.349481 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.349504 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.349521 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:40Z","lastTransitionTime":"2025-11-24T01:13:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.451758 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.451827 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.451848 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.451875 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.451894 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:40Z","lastTransitionTime":"2025-11-24T01:13:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.554437 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.554483 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.554500 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.554520 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.554535 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:40Z","lastTransitionTime":"2025-11-24T01:13:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.658059 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.658097 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.658106 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.658120 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.658130 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:40Z","lastTransitionTime":"2025-11-24T01:13:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.761857 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.761910 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.761954 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.761975 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.761987 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:40Z","lastTransitionTime":"2025-11-24T01:13:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.865780 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.865851 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.865886 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.865919 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.865941 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:40Z","lastTransitionTime":"2025-11-24T01:13:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.969163 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.969224 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.969249 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.969277 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.969299 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:40Z","lastTransitionTime":"2025-11-24T01:13:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.996053 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:13:40 crc kubenswrapper[4755]: I1124 01:13:40.996161 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:13:40 crc kubenswrapper[4755]: E1124 01:13:40.996315 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:13:40 crc kubenswrapper[4755]: E1124 01:13:40.996463 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:13:41 crc kubenswrapper[4755]: I1124 01:13:41.071897 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:41 crc kubenswrapper[4755]: I1124 01:13:41.071964 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:41 crc kubenswrapper[4755]: I1124 01:13:41.071990 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:41 crc kubenswrapper[4755]: I1124 01:13:41.072018 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:41 crc kubenswrapper[4755]: I1124 01:13:41.072039 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:41Z","lastTransitionTime":"2025-11-24T01:13:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:41 crc kubenswrapper[4755]: I1124 01:13:41.175819 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:41 crc kubenswrapper[4755]: I1124 01:13:41.176277 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:41 crc kubenswrapper[4755]: I1124 01:13:41.176430 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:41 crc kubenswrapper[4755]: I1124 01:13:41.176655 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:41 crc kubenswrapper[4755]: I1124 01:13:41.176842 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:41Z","lastTransitionTime":"2025-11-24T01:13:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:41 crc kubenswrapper[4755]: I1124 01:13:41.279405 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:41 crc kubenswrapper[4755]: I1124 01:13:41.279705 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:41 crc kubenswrapper[4755]: I1124 01:13:41.279829 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:41 crc kubenswrapper[4755]: I1124 01:13:41.280059 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:41 crc kubenswrapper[4755]: I1124 01:13:41.280158 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:41Z","lastTransitionTime":"2025-11-24T01:13:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:41 crc kubenswrapper[4755]: I1124 01:13:41.384723 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:41 crc kubenswrapper[4755]: I1124 01:13:41.384775 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:41 crc kubenswrapper[4755]: I1124 01:13:41.384797 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:41 crc kubenswrapper[4755]: I1124 01:13:41.384830 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:41 crc kubenswrapper[4755]: I1124 01:13:41.384850 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:41Z","lastTransitionTime":"2025-11-24T01:13:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:41 crc kubenswrapper[4755]: I1124 01:13:41.488230 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:41 crc kubenswrapper[4755]: I1124 01:13:41.488306 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:41 crc kubenswrapper[4755]: I1124 01:13:41.488346 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:41 crc kubenswrapper[4755]: I1124 01:13:41.488379 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:41 crc kubenswrapper[4755]: I1124 01:13:41.488405 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:41Z","lastTransitionTime":"2025-11-24T01:13:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:41 crc kubenswrapper[4755]: I1124 01:13:41.591953 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:41 crc kubenswrapper[4755]: I1124 01:13:41.592010 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:41 crc kubenswrapper[4755]: I1124 01:13:41.592027 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:41 crc kubenswrapper[4755]: I1124 01:13:41.592051 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:41 crc kubenswrapper[4755]: I1124 01:13:41.592068 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:41Z","lastTransitionTime":"2025-11-24T01:13:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:41 crc kubenswrapper[4755]: I1124 01:13:41.694335 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:41 crc kubenswrapper[4755]: I1124 01:13:41.694402 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:41 crc kubenswrapper[4755]: I1124 01:13:41.694426 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:41 crc kubenswrapper[4755]: I1124 01:13:41.694453 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:41 crc kubenswrapper[4755]: I1124 01:13:41.694475 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:41Z","lastTransitionTime":"2025-11-24T01:13:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:41 crc kubenswrapper[4755]: I1124 01:13:41.797501 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:41 crc kubenswrapper[4755]: I1124 01:13:41.797555 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:41 crc kubenswrapper[4755]: I1124 01:13:41.797572 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:41 crc kubenswrapper[4755]: I1124 01:13:41.797593 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:41 crc kubenswrapper[4755]: I1124 01:13:41.797636 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:41Z","lastTransitionTime":"2025-11-24T01:13:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:41 crc kubenswrapper[4755]: I1124 01:13:41.900223 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:41 crc kubenswrapper[4755]: I1124 01:13:41.900460 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:41 crc kubenswrapper[4755]: I1124 01:13:41.900592 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:41 crc kubenswrapper[4755]: I1124 01:13:41.900746 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:41 crc kubenswrapper[4755]: I1124 01:13:41.900868 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:41Z","lastTransitionTime":"2025-11-24T01:13:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:41 crc kubenswrapper[4755]: I1124 01:13:41.995712 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:13:41 crc kubenswrapper[4755]: I1124 01:13:41.995720 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:13:41 crc kubenswrapper[4755]: E1124 01:13:41.995955 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:13:41 crc kubenswrapper[4755]: E1124 01:13:41.996212 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.005643 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.005695 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.005709 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.005728 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.005746 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:42Z","lastTransitionTime":"2025-11-24T01:13:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.108979 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.109053 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.109076 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.109105 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.109124 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:42Z","lastTransitionTime":"2025-11-24T01:13:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.212204 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.212255 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.212272 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.212297 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.212356 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:42Z","lastTransitionTime":"2025-11-24T01:13:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.314469 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.314505 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.314513 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.314527 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.314538 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:42Z","lastTransitionTime":"2025-11-24T01:13:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.417213 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.417267 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.417285 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.417308 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.417326 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:42Z","lastTransitionTime":"2025-11-24T01:13:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.520150 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.520574 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.520809 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.521009 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.521143 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:42Z","lastTransitionTime":"2025-11-24T01:13:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.624544 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.624625 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.624641 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.624659 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.624672 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:42Z","lastTransitionTime":"2025-11-24T01:13:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.727763 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.727827 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.727842 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.727864 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.727877 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:42Z","lastTransitionTime":"2025-11-24T01:13:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.830974 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.831049 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.831066 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.831091 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.831108 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:42Z","lastTransitionTime":"2025-11-24T01:13:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.933515 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.933580 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.933625 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.933651 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.933668 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:42Z","lastTransitionTime":"2025-11-24T01:13:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.995573 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:13:42 crc kubenswrapper[4755]: I1124 01:13:42.995699 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:13:42 crc kubenswrapper[4755]: E1124 01:13:42.995823 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:13:42 crc kubenswrapper[4755]: E1124 01:13:42.995920 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.036415 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.036482 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.036500 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.036525 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.036543 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:43Z","lastTransitionTime":"2025-11-24T01:13:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.139918 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.139975 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.139992 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.140016 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.140035 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:43Z","lastTransitionTime":"2025-11-24T01:13:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.242532 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.242955 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.243185 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.243412 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.243572 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:43Z","lastTransitionTime":"2025-11-24T01:13:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.347133 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.347199 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.347215 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.347236 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.347256 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:43Z","lastTransitionTime":"2025-11-24T01:13:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.449669 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.449707 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.449718 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.449734 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.449745 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:43Z","lastTransitionTime":"2025-11-24T01:13:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.552467 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.552543 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.552567 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.552596 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.552678 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:43Z","lastTransitionTime":"2025-11-24T01:13:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.655351 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.655418 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.655442 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.655465 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.655480 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:43Z","lastTransitionTime":"2025-11-24T01:13:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.759821 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.759871 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.759882 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.759898 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.759909 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:43Z","lastTransitionTime":"2025-11-24T01:13:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.862138 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.862201 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.862209 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.862223 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.862232 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:43Z","lastTransitionTime":"2025-11-24T01:13:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.964515 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.964589 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.964670 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.964727 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.964752 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:43Z","lastTransitionTime":"2025-11-24T01:13:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.996357 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.996358 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:13:43 crc kubenswrapper[4755]: E1124 01:13:43.996505 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:13:43 crc kubenswrapper[4755]: E1124 01:13:43.997017 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:13:43 crc kubenswrapper[4755]: I1124 01:13:43.997174 4755 scope.go:117] "RemoveContainer" containerID="7d0548d371aaacdf149dc5ab3622d89462cb172cb3686889840db43146fe76f4" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.067384 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.067438 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.067455 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.067476 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.067491 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:44Z","lastTransitionTime":"2025-11-24T01:13:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.169793 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.169838 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.169852 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.169871 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.169885 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:44Z","lastTransitionTime":"2025-11-24T01:13:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.271897 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.271932 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.271942 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.271959 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.271973 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:44Z","lastTransitionTime":"2025-11-24T01:13:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.291040 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4ngwk_b3b1d3cb-ffbd-4034-832d-6577ccf2f780/ovnkube-controller/1.log" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.293687 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" event={"ID":"b3b1d3cb-ffbd-4034-832d-6577ccf2f780","Type":"ContainerStarted","Data":"154995ba8ce124a48fc983de87e0fcbccc684c8bf36edf3b12565a113f82c8b5"} Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.294206 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.309182 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4407e6a61e6a16826e5087eda8f3f2904933c9fb540f0a3c03e5414188248657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bcaff046f33f0df74919d9673b7e12e0609ea03f7686c1806ffb3c351965195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:44Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.323489 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://519ff5cd20c605ee21f4c1651c932618d593cb5ecf336f3504ebe1bd0f35adbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:44Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.336846 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1962128-02a0-46c3-82c2-5055c2aed0b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a249831bc7bdc4396bd5498c176a52332946f791c39b1f90eea4c157e61fc60c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16a2280b76aa63e33f859193da8353df1f8a4014ef51c0ef6350beb5fb217245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h8xzm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:44Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.357058 4755 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-8pm69" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19dbf7ff-f684-4c57-803a-83b39e0705a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://090d3d275453f489254405c1197750888cc4d15d4251c6ce9b8a1a873319d5a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6k7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-8pm69\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:44Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.365414 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vzkz4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a85ee7-3417-491b-a375-99f140cfb5de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67684d7d9617d917019f391c4031994da8f4ff110a4fe3d77dfa1bf76378dec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tts5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vzkz4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:44Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.375103 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.375343 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.375353 4755 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.375367 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.375376 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:44Z","lastTransitionTime":"2025-11-24T01:13:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.376843 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7wvfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a32df1d1-89b4-4a22-a07f-2d7ecd2e265b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f7432e293e8131ca1bbbc7281982a639b41e995fc2210e6351a0e87cf719700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2pjf7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff945c5b4cb1534cb8e31d7e471ebe2f47fe04ea46f09fc53223a72f4c48de5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ov
nkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2pjf7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:27Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7wvfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:44Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.389837 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69352793-3db7-450c-98c2-5fd2040bb5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c1c133fe773aac40181aea15bd49ad2aee2f08caed8f7436c81ae3fc310300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b22922ca0ffb3c9c410a6ed82822ce5b3436d0e44993abe1fcb678959f243808\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resour
ce-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5845caccbc89d64a7aaabedeec11a5d75fb7f25eb806cf8dceaf15263f30b4f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a9cb7e9aa093857c03bb4267ff3386e04e4de6e4025fdbfcb628db800d62745\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:44Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.401687 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27983572-2d9c-43d6-a7f0-445a0aec0531\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://612fe8e80230000f941c6c568234f32233a32e831a6d75556d9af68a1b790a7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zb6qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:44Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.410295 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9cl8m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ccb86693-0b66-43ca-a2d1-e9594521d30f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl68c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl68c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:29Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9cl8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:44Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.433891 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b438206-d27d-46e5-a2b5-91397c0d8856\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7575abb359776b9b306d821357a1147cadfd41c99d9528e644a3c1fa046d1df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11946902b549b820749fb6127935c82cc1e840cd84834d1803ffc03003c967d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34675c12ebbb549e0db1190d1d99a23ccef6e4a97b884bcdafdc629204238c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25ec710b1a2a3c639ced06a73586f11fbc05067
1bc059fdcaf49714f153445d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64cdda2f5b59ddeadc36986c875cf28e18a7ed1ce1822dff6002724f7e558215\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:44Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.447232 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://311d898b91dec73c482063aca5a79fefbc55b705d41d4da5fbbc21d8972deac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:44Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.460455 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:44Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.471649 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dt8lz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58ea3cf6-1f18-428e-9b3f-6064671faf72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63e4ef9d52542d28688b962a86087f6b95a40625eefb13ad0460e2e728f5048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fc2wq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dt8lz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:44Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.476903 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.476945 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.476956 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.476970 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.476981 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:44Z","lastTransitionTime":"2025-11-24T01:13:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.490513 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://154995ba8ce124a48fc983de87e0fcbccc684c8bf36edf3b12565a113f82c8b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d0548d371aaacdf149dc5ab3622d89462cb172cb3686889840db43146fe76f4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T01:13:27Z\\\",\\\"message\\\":\\\"ce-id-ver:9d751cbb-f2e2-430d-9754-c882a5e924a5 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:3b 10.217.0.59]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {960d98b2-dc64-4e93-a4b6-9b19847af71e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 01:13:26.996372 6172 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 01:13:26.996031 6172 ovn.go:134] Ensuring zone local for Pod openshift-etcd/etcd-crc in node crc\\\\nF1124 01:13:26.996342 6172 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error 
occurred\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:26Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\
\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4ngwk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:44Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.504103 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"103d5a7c-0ec9-4c03-9f86-03dc3e01665c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40df6848b15d905bccead2547cc1dec836a69e38e80cb65de5dbee812f0081cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89d05f3773b4861d3bb33d7e6a6f09baaedb7510ed0b15703495110923a2f790\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78a49336f91d7c8aeb0d2861d1228d7b8eb478290bdef3b0662e26f9e1cddd57\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cbe55a8cc684f932438f20f513d01400266c1c6ee3ded9c9ae1ea0b92aec7af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T01:13:12Z\\\",\\\"message\\\":\\\"GCM_SHA384' detected.\\\\nW1124 01:13:12.041119 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 01:13:12.041123 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 01:13:12.046149 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046144 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046262 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046271 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1124 01:13:12.046381 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1124 01:13:12.046396 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1124 01:13:12.046536 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\"\\\\nI1124 01:13:12.046556 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1763946791\\\\\\\\\\\\\\\" (2025-11-24 01:13:10 +0000 UTC to 2025-12-24 01:13:11 +0000 UTC (now=2025-11-24 01:13:12.046510889 +0000 UTC))\\\\\\\"\\\\nF1124 01:13:12.046649 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcfcdeaa4c2ae4a6f4bf2d79a64607361b9798549e8910a7374e4e3d17a16ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:44Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.520902 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:44Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.569207 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:44Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.579103 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.579129 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.579137 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.579151 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.579160 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:44Z","lastTransitionTime":"2025-11-24T01:13:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.681436 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.681471 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.681480 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.681495 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.681504 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:44Z","lastTransitionTime":"2025-11-24T01:13:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.783529 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.783567 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.783576 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.783589 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.783597 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:44Z","lastTransitionTime":"2025-11-24T01:13:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.886547 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.886589 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.886598 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.886630 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.886644 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:44Z","lastTransitionTime":"2025-11-24T01:13:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.990127 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.990477 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.990487 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.990503 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.990516 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:44Z","lastTransitionTime":"2025-11-24T01:13:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.996701 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:13:44 crc kubenswrapper[4755]: I1124 01:13:44.996724 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:13:44 crc kubenswrapper[4755]: E1124 01:13:44.996824 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:13:44 crc kubenswrapper[4755]: E1124 01:13:44.996931 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.093081 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.093150 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.093168 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.093193 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.093212 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:45Z","lastTransitionTime":"2025-11-24T01:13:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.196553 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.196647 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.196697 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.196722 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.196756 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:45Z","lastTransitionTime":"2025-11-24T01:13:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.299722 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.299769 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.299780 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.299797 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.299809 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:45Z","lastTransitionTime":"2025-11-24T01:13:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.302142 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4ngwk_b3b1d3cb-ffbd-4034-832d-6577ccf2f780/ovnkube-controller/2.log" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.303026 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4ngwk_b3b1d3cb-ffbd-4034-832d-6577ccf2f780/ovnkube-controller/1.log" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.307650 4755 generic.go:334] "Generic (PLEG): container finished" podID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerID="154995ba8ce124a48fc983de87e0fcbccc684c8bf36edf3b12565a113f82c8b5" exitCode=1 Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.307689 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" event={"ID":"b3b1d3cb-ffbd-4034-832d-6577ccf2f780","Type":"ContainerDied","Data":"154995ba8ce124a48fc983de87e0fcbccc684c8bf36edf3b12565a113f82c8b5"} Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.307729 4755 scope.go:117] "RemoveContainer" containerID="7d0548d371aaacdf149dc5ab3622d89462cb172cb3686889840db43146fe76f4" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.309208 4755 scope.go:117] "RemoveContainer" containerID="154995ba8ce124a48fc983de87e0fcbccc684c8bf36edf3b12565a113f82c8b5" Nov 24 01:13:45 crc kubenswrapper[4755]: E1124 01:13:45.309537 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-4ngwk_openshift-ovn-kubernetes(b3b1d3cb-ffbd-4034-832d-6577ccf2f780)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.326265 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dt8lz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58ea3cf6-1f18-428e-9b3f-6064671faf72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63e4ef9d52542d28688b962a86087f6b95a40625eefb13ad0460e2e728f5048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fc2wq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dt8lz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:45Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.353458 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://154995ba8ce124a48fc983de87e0fcbccc684c8bf36edf3b12565a113f82c8b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d0548d371aaacdf149dc5ab3622d89462cb172cb3686889840db43146fe76f4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T01:13:27Z\\\",\\\"message\\\":\\\"ce-id-ver:9d751cbb-f2e2-430d-9754-c882a5e924a5 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:3b 10.217.0.59]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {960d98b2-dc64-4e93-a4b6-9b19847af71e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 01:13:26.996372 6172 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 01:13:26.996031 6172 ovn.go:134] Ensuring zone local for Pod openshift-etcd/etcd-crc in node crc\\\\nF1124 01:13:26.996342 6172 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:26Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://154995ba8ce124a48fc983de87e0fcbccc684c8bf36edf3b12565a113f82c8b5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T01:13:44Z\\\",\\\"message\\\":\\\"ices.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1124 01:13:44.947473 6420 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 01:13:44.947487 6420 services_controller.go:452] Built service openshift-kube-apiserver/apiserver per-node LB for network=default: []services.LB{}\\\\nI1124 01:13:44.947496 6420 services_controller.go:453] Built service openshift-kube-apiserver/apiserver template LB 
for network=default: []services.LB{}\\\\nI1124 01:13:44.947495 6420 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:73135118-cf1b-4568-bd31-2f50308bf69d}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1124 01:13:44.946799 6420 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\
\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4ngwk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:45Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.371470 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"103d5a7c-0ec9-4c03-9f86-03dc3e01665c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40df6848b15d905bccead2547cc1dec836a69e38e80cb65de5dbee812f0081cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89d05f3773b4861d3bb33d7e6a6f09baaedb7510ed0b15703495110923a2f790\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78a49336f91d7c8aeb0d2861d1228d7b8eb478290bdef3b0662e26f9e1cddd57\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cbe55a8cc684f932438f20f513d01400266c1c6ee3ded9c9ae1ea0b92aec7af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T01:13:12Z\\\",\\\"message\\\":\\\"GCM_SHA384' detected.\\\\nW1124 01:13:12.041119 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 01:13:12.041123 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 01:13:12.046149 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046144 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046262 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046271 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1124 01:13:12.046381 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1124 01:13:12.046396 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1124 01:13:12.046536 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\"\\\\nI1124 01:13:12.046556 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1763946791\\\\\\\\\\\\\\\" (2025-11-24 01:13:10 +0000 UTC to 2025-12-24 01:13:11 +0000 UTC (now=2025-11-24 01:13:12.046510889 +0000 UTC))\\\\\\\"\\\\nF1124 01:13:12.046649 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcfcdeaa4c2ae4a6f4bf2d79a64607361b9798549e8910a7374e4e3d17a16ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:45Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.392716 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://311d898b91dec73c482063aca5a79fefbc55b705d41d4da5fbbc21d8972deac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:45Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.402707 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.402751 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.402766 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.402787 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.402802 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:45Z","lastTransitionTime":"2025-11-24T01:13:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.407352 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:45Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.420236 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:45Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.433283 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:45Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.446924 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://519ff5cd20c605ee21f4c1651c932618d593cb5ecf336f3504ebe1bd0f35adbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:45Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.456430 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1962128-02a0-46c3-82c2-5055c2aed0b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a249831bc7bdc4396bd5498c176a52332946f791c39b1f90eea4c157e61fc60c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16a2280b76aa63e33f859193da8353df1f8a4014ef51c0ef6350beb5fb217245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h8xzm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:45Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.470774 4755 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-8pm69" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19dbf7ff-f684-4c57-803a-83b39e0705a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://090d3d275453f489254405c1197750888cc4d15d4251c6ce9b8a1a873319d5a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6k7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-8pm69\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:45Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.482174 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vzkz4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a85ee7-3417-491b-a375-99f140cfb5de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67684d7d9617d917019f391c4031994da8f4ff110a4fe3d77dfa1bf76378dec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tts5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vzkz4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:45Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.495244 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7wvfc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a32df1d1-89b4-4a22-a07f-2d7ecd2e265b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f7432e293e8131ca1bbbc7281982a639b41e995fc2210e6351a0e87cf719700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2pjf7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff945c5b4cb1534cb8e31d7e471ebe2f47fe04ea46f09fc53223a72f4c48de5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2pjf7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:27Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7wvfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:45Z is after 2025-08-24T17:21:41Z" Nov 24 
01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.505350 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.505401 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.505415 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.505433 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.505444 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:45Z","lastTransitionTime":"2025-11-24T01:13:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.506917 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ccb86693-0b66-43ca-a2d1-e9594521d30f-metrics-certs\") pod \"network-metrics-daemon-9cl8m\" (UID: \"ccb86693-0b66-43ca-a2d1-e9594521d30f\") " pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:13:45 crc kubenswrapper[4755]: E1124 01:13:45.507085 4755 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 24 01:13:45 crc kubenswrapper[4755]: E1124 01:13:45.507145 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ccb86693-0b66-43ca-a2d1-e9594521d30f-metrics-certs podName:ccb86693-0b66-43ca-a2d1-e9594521d30f nodeName:}" failed. No retries permitted until 2025-11-24 01:14:01.507128158 +0000 UTC m=+66.193193669 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ccb86693-0b66-43ca-a2d1-e9594521d30f-metrics-certs") pod "network-metrics-daemon-9cl8m" (UID: "ccb86693-0b66-43ca-a2d1-e9594521d30f") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.511393 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4407e6a61e6a16826e5087eda8f3f2904933c9fb540f0a3c03e5414188248657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bcaff046f33f0df74919d9673b7e12e0609ea03f7686c1806ffb3c351965195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-11-24T01:13:45Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.524450 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9cl8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ccb86693-0b66-43ca-a2d1-e9594521d30f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl68c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl68c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:29Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9cl8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:45Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.543204 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b438206-d27d-46e5-a2b5-91397c0d8856\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7575abb359776b9b306d821357a1147cadfd41c99d9528e644a3c1fa046d1df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11946902b549b820749fb6127935c82cc1e840cd84834d1803ffc03003c967d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34675c12ebbb549e0db1190d1d99a23ccef6e4a97b884bcdafdc629204238c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25ec710b1a2a3c639ced06a73586f11fbc05067
1bc059fdcaf49714f153445d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64cdda2f5b59ddeadc36986c875cf28e18a7ed1ce1822dff6002724f7e558215\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:45Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.560427 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69352793-3db7-450c-98c2-5fd2040bb5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c1c133fe773aac40181aea15bd49ad2aee2f08caed8f7436c81ae3fc310300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b22922ca0ffb3c9c410a6ed82822ce5b3436d0e44993abe1fcb678959f243808\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5845caccbc89d64a7aaabedeec11a5d75fb7f25eb806cf8dceaf15263f30b4f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a9cb7e9aa093857c03bb4267ff3386e04e4de6e4025fdbfcb628db800d62745\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:45Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.574030 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27983572-2d9c-43d6-a7f0-445a0aec0531\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://612fe8e80230000f941c6c568234f32233a32e831a6d75556d9af68a1b790a7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zb6qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:45Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.607082 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.607120 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:45 crc 
kubenswrapper[4755]: I1124 01:13:45.607129 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.607142 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.607152 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:45Z","lastTransitionTime":"2025-11-24T01:13:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.608144 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.608192 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.608204 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.608220 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.608231 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:45Z","lastTransitionTime":"2025-11-24T01:13:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:45 crc kubenswrapper[4755]: E1124 01:13:45.619262 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"50bb41a3-bb20-461c-ba4c-72998ece87bc\\\",\\\"systemUUID\\\":\\\"cb6dbfa2-ee9a-4406-af65-1558b4c6cb25\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:45Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.622505 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.622541 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.622552 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.622566 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.622574 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:45Z","lastTransitionTime":"2025-11-24T01:13:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:45 crc kubenswrapper[4755]: E1124 01:13:45.634866 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"50bb41a3-bb20-461c-ba4c-72998ece87bc\\\",\\\"systemUUID\\\":\\\"cb6dbfa2-ee9a-4406-af65-1558b4c6cb25\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:45Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.637917 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.637962 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.637974 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.637990 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.638002 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:45Z","lastTransitionTime":"2025-11-24T01:13:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:45 crc kubenswrapper[4755]: E1124 01:13:45.649402 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"50bb41a3-bb20-461c-ba4c-72998ece87bc\\\",\\\"systemUUID\\\":\\\"cb6dbfa2-ee9a-4406-af65-1558b4c6cb25\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:45Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.653420 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.653756 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.653846 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.653933 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.653991 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:45Z","lastTransitionTime":"2025-11-24T01:13:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:45 crc kubenswrapper[4755]: E1124 01:13:45.666369 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"50bb41a3-bb20-461c-ba4c-72998ece87bc\\\",\\\"systemUUID\\\":\\\"cb6dbfa2-ee9a-4406-af65-1558b4c6cb25\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:45Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.669384 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.669425 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.669435 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.669451 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.669463 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:45Z","lastTransitionTime":"2025-11-24T01:13:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:45 crc kubenswrapper[4755]: E1124 01:13:45.680201 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"50bb41a3-bb20-461c-ba4c-72998ece87bc\\\",\\\"systemUUID\\\":\\\"cb6dbfa2-ee9a-4406-af65-1558b4c6cb25\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:45Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:45 crc kubenswrapper[4755]: E1124 01:13:45.680305 4755 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.708853 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.708882 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.708892 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.708906 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.708914 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:45Z","lastTransitionTime":"2025-11-24T01:13:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.811980 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.812050 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.812074 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.812110 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.812137 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:45Z","lastTransitionTime":"2025-11-24T01:13:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.915552 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.915922 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.915986 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.916019 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.916040 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:45Z","lastTransitionTime":"2025-11-24T01:13:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.996434 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:13:45 crc kubenswrapper[4755]: E1124 01:13:45.996787 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:13:45 crc kubenswrapper[4755]: I1124 01:13:45.996910 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:13:45 crc kubenswrapper[4755]: E1124 01:13:45.997099 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.016467 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4407e6a61e6a16826e5087eda8f3f2904933c9fb540f0a3c03e5414188248657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bcaff046f33f0df74919d9673b7e12e0609ea03f7686c1806ffb3c351965195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"rea
dy\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.022266 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.022374 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.022398 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.022429 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.022450 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:46Z","lastTransitionTime":"2025-11-24T01:13:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.034342 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://519ff5cd20c605ee21f4c1651c932618d593cb5ecf336f3504ebe1bd0f35adbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.048341 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1962128-02a0-46c3-82c2-5055c2aed0b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a249831bc7bdc4396bd5498c176a52332946f791c39b1f90eea4c157e61fc60c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16a2280b76aa63e33f859193da8353df1f8a4014ef51c0ef6350beb5fb217245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h8xzm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.061039 4755 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-8pm69" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19dbf7ff-f684-4c57-803a-83b39e0705a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://090d3d275453f489254405c1197750888cc4d15d4251c6ce9b8a1a873319d5a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6k7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-8pm69\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.073425 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vzkz4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a85ee7-3417-491b-a375-99f140cfb5de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67684d7d9617d917019f391c4031994da8f4ff110a4fe3d77dfa1bf76378dec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tts5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vzkz4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.087592 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7wvfc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a32df1d1-89b4-4a22-a07f-2d7ecd2e265b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f7432e293e8131ca1bbbc7281982a639b41e995fc2210e6351a0e87cf719700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2pjf7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff945c5b4cb1534cb8e31d7e471ebe2f47fe04ea46f09fc53223a72f4c48de5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2pjf7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:27Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7wvfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 
01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.105233 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27983572-2d9c-43d6-a7f0-445a0aec0531\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://612fe8e80230000f941c6c568234f32233a32e831a6d75556d9af68a1b790a7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"image\\\":\\\"quay.io/openshift-rele
ase-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zb6qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.118001 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9cl8m" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ccb86693-0b66-43ca-a2d1-e9594521d30f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl68c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl68c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:29Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9cl8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.124138 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.124178 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.124189 4755 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.124207 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.124219 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:46Z","lastTransitionTime":"2025-11-24T01:13:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.145917 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b438206-d27d-46e5-a2b5-91397c0d8856\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7575abb359776b9b306d821357a1147cadfd41c99d9528e644a3c1fa046d1df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11946902b549b820749fb6127935c82cc1e840cd84834d1803ffc03003c967d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\"
:\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34675c12ebbb549e0db1190d1d99a23ccef6e4a97b884bcdafdc629204238c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25ec710b1a2a3c639ced06a73586f11fbc050671bc059fdcaf49714f153445d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64cdda2f5b59ddeadc36986c875cf28e18a7ed1ce1822dff6002724f7e558215\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},
{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.158863 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"69352793-3db7-450c-98c2-5fd2040bb5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c1c133fe773aac40181aea15bd49ad2aee2f08caed8f7436c81ae3fc310300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b22922ca0ffb3c9c410a6ed82822ce5b3436d0e44993abe1fcb678959f243808\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5845caccbc89d64a7aaabedeec11a5d75fb7f25eb806cf8dceaf15263f30b4f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a9cb7e9aa093857c03bb4267ff3386e04e4de6e4025fdbfcb628db800d62745\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.171139 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.180678 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dt8lz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58ea3cf6-1f18-428e-9b3f-6064671faf72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63e4ef9d52542d28688b962a86087f6b95a40625eefb13ad0460e2e728f5048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fc2wq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dt8lz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.192101 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.197635 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mount
Path\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api
-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://154995ba8ce124a48fc983de87e0fcbccc684c8bf36edf3b12565a113f82c8b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d0548d371aaacdf149dc5ab3622d89462cb172cb3686889840db43146fe76f4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T01:13:27Z\\\",\\\"message\\\":\\\"ce-id-ver:9d751cbb-f2e2-430d-9754-c882a5e924a5 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:3b 10.217.0.59]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {960d98b2-dc64-4e93-a4b6-9b19847af71e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 01:13:26.996372 6172 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 01:13:26.996031 6172 ovn.go:134] Ensuring zone local for Pod openshift-etcd/etcd-crc in node crc\\\\nF1124 01:13:26.996342 6172 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network 
controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:26Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://154995ba8ce124a48fc983de87e0fcbccc684c8bf36edf3b12565a113f82c8b5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T01:13:44Z\\\",\\\"message\\\":\\\"ices.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1124 01:13:44.947473 6420 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 01:13:44.947487 6420 services_controller.go:452] Built service openshift-kube-apiserver/apiserver per-node LB for network=default: []services.LB{}\\\\nI1124 01:13:44.947496 6420 services_controller.go:453] Built service openshift-kube-apiserver/apiserver template LB for network=default: []services.LB{}\\\\nI1124 01:13:44.947495 6420 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:73135118-cf1b-4568-bd31-2f50308bf69d}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1124 01:13:44.946799 6420 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4ngwk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.202067 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.211662 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"103d5a7c-0ec9-4c03-9f86-03dc3e01665c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40df6848b15d905bccead2547cc1dec836a69e38e80cb65de5dbee812f0081cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89d05f3773b4861d3bb33d7e6a6f09baaedb7510ed0b15703495110923a2f790\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78a49336f91d7c8aeb0d2861d1228d7b8eb478290bdef3b0662e26f9e1cddd57\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cbe55a8cc684f932438f20f513d01400266c1c6ee3ded9c9ae1ea0b92aec7af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T01:13:12Z\\\",\\\"message\\\":\\\"GCM_SHA384' detected.\\\\nW1124 01:13:12.041119 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 01:13:12.041123 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 01:13:12.046149 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046144 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046262 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046271 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1124 01:13:12.046381 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1124 01:13:12.046396 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1124 01:13:12.046536 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\"\\\\nI1124 01:13:12.046556 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1763946791\\\\\\\\\\\\\\\" (2025-11-24 01:13:10 +0000 UTC to 2025-12-24 01:13:11 +0000 UTC (now=2025-11-24 01:13:12.046510889 +0000 UTC))\\\\\\\"\\\\nF1124 01:13:12.046649 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcfcdeaa4c2ae4a6f4bf2d79a64607361b9798549e8910a7374e4e3d17a16ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.223266 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://311d898b91dec73c482063aca5a79fefbc55b705d41d4da5fbbc21d8972deac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.226714 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.226783 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.226800 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.226828 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.226845 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:46Z","lastTransitionTime":"2025-11-24T01:13:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.236649 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.249739 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.262522 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://519ff5cd20c605ee21f4c1651c932618d593cb5ecf336f3504ebe1bd0f35adbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.272811 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1962128-02a0-46c3-82c2-5055c2aed0b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a249831bc7bdc4396bd5498c176a52332946f791c39b1f90eea4c157e61fc60c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16a2280b76aa63e33f859193da8353df1f8a4014ef51c0ef6350beb5fb217245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h8xzm\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.283887 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8pm69" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19dbf7ff-f684-4c57-803a-83b39e0705a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://090d3d275453f489254405c1197750888cc4d15d4251c6ce9b8a1a873319d5a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/ser
viceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6k7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8pm69\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.294174 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vzkz4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a85ee7-3417-491b-a375-99f140cfb5de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67684d7d9617d917019f391c4031994da8f4ff110a4fe3d77dfa1bf76378dec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tts5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vzkz4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.308154 4755 
status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7wvfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a32df1d1-89b4-4a22-a07f-2d7ecd2e265b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f7432e293e8131ca1bbbc7281982a639b41e995fc2210e6351a0e87cf719700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2pjf7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff945c5b4cb1534cb8e31d7e471ebe2f47fe04ea46f09fc53223a72f4c48de5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2pjf7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:27Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7wvfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.314048 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4ngwk_b3b1d3cb-ffbd-4034-832d-6577ccf2f780/ovnkube-controller/2.log" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.317647 4755 scope.go:117] "RemoveContainer" containerID="154995ba8ce124a48fc983de87e0fcbccc684c8bf36edf3b12565a113f82c8b5" Nov 24 01:13:46 crc kubenswrapper[4755]: E1124 01:13:46.318711 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-4ngwk_openshift-ovn-kubernetes(b3b1d3cb-ffbd-4034-832d-6577ccf2f780)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.322553 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"335c2304-248f-4c4e-8e4c-58ec5e76af4c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://288a11a39d941a7886acc9d7ee23935ca723a109dcb2b8f0743bb56e7d54a7e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://277775a9aa022595c0bd26119595cdf69b370e4851a05cab33b8ef4779923a78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
tc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beedc1a75d10e8bbe8f51adfca845b931ae288d59d89220ecc08bb65c425dcd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfc5ee83a76ce24ca8eab469ec28b319d9e69d9c7d8618f97a2b2a813022a97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfc5ee83a76ce24ca8eab469ec28b319d9e69d9c7d8618f97a2b2a813022a97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.329038 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.329071 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.329083 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.329100 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.329111 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:46Z","lastTransitionTime":"2025-11-24T01:13:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns 
error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.339262 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4407e6a61e6a16826e5087eda8f3f2904933c9fb540f0a3c03e5414188248657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bcaff046f33f0df74919d9673b7e12e0609ea03f7686c1806ffb3c351965195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.350137 4755 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/network-metrics-daemon-9cl8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ccb86693-0b66-43ca-a2d1-e9594521d30f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl68c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl68c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:29Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9cl8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.374676 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b438206-d27d-46e5-a2b5-91397c0d8856\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7575abb359776b9b306d821357a1147cadfd41c99d9528e644a3c1fa046d1df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11946902b549b820749fb6127935c82cc1e840cd84834d1803ffc03003c967d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34675c12ebbb549e0db1190d1d99a23ccef6e4a97b884bcdafdc629204238c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25ec710b1a2a3c639ced06a73586f11fbc05067
1bc059fdcaf49714f153445d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64cdda2f5b59ddeadc36986c875cf28e18a7ed1ce1822dff6002724f7e558215\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.390755 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69352793-3db7-450c-98c2-5fd2040bb5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c1c133fe773aac40181aea15bd49ad2aee2f08caed8f7436c81ae3fc310300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b22922ca0ffb3c9c410a6ed82822ce5b3436d0e44993abe1fcb678959f243808\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5845caccbc89d64a7aaabedeec11a5d75fb7f25eb806cf8dceaf15263f30b4f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a9cb7e9aa093857c03bb4267ff3386e04e4de6e4025fdbfcb628db800d62745\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.406628 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27983572-2d9c-43d6-a7f0-445a0aec0531\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://612fe8e80230000f941c6c568234f32233a32e831a6d75556d9af68a1b790a7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zb6qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.419429 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dt8lz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58ea3cf6-1f18-428e-9b3f-6064671faf72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63e4ef9d52542d28688b962a86087f6b95a40625eefb13ad0460e2e728f5048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fc2wq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dt8lz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.434270 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.434315 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.434326 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.434343 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.434356 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:46Z","lastTransitionTime":"2025-11-24T01:13:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.448076 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://154995ba8ce124a48fc983de87e0fcbccc684c8bf36edf3b12565a113f82c8b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d0548d371aaacdf149dc5ab3622d89462cb172cb3686889840db43146fe76f4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T01:13:27Z\\\",\\\"message\\\":\\\"ce-id-ver:9d751cbb-f2e2-430d-9754-c882a5e924a5 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:3b 10.217.0.59]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {960d98b2-dc64-4e93-a4b6-9b19847af71e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 01:13:26.996372 6172 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 01:13:26.996031 6172 ovn.go:134] Ensuring zone local for Pod openshift-etcd/etcd-crc in node crc\\\\nF1124 01:13:26.996342 6172 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error 
occurred\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:26Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://154995ba8ce124a48fc983de87e0fcbccc684c8bf36edf3b12565a113f82c8b5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T01:13:44Z\\\",\\\"message\\\":\\\"ices.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1124 01:13:44.947473 6420 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 01:13:44.947487 6420 services_controller.go:452] Built service openshift-kube-apiserver/apiserver per-node LB for network=default: []services.LB{}\\\\nI1124 01:13:44.947496 6420 services_controller.go:453] Built service openshift-kube-apiserver/apiserver template LB for network=default: []services.LB{}\\\\nI1124 01:13:44.947495 6420 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:73135118-cf1b-4568-bd31-2f50308bf69d}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1124 01:13:44.946799 6420 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4ngwk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.466347 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"103d5a7c-0ec9-4c03-9f86-03dc3e01665c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40df6848b15d905bccead2547cc1dec836a69e38e80cb65de5dbee812f0081cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89d05f3773b4861d3bb33d7e6a6f09baaedb7510ed0b15703495110923a2f790\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78a49336f91d7c8aeb0d2861d1228d7b8eb478290bdef3b0662e26f9e1cddd57\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cbe55a8cc684f932438f20f513d01400266c1c6ee3ded9c9ae1ea0b92aec7af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T01:13:12Z\\\",\\\"message\\\":\\\"GCM_SHA384' detected.\\\\nW1124 01:13:12.041119 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 01:13:12.041123 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 01:13:12.046149 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046144 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046262 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046271 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1124 01:13:12.046381 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1124 01:13:12.046396 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1124 01:13:12.046536 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\"\\\\nI1124 01:13:12.046556 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1763946791\\\\\\\\\\\\\\\" (2025-11-24 01:13:10 +0000 UTC to 2025-12-24 01:13:11 +0000 UTC (now=2025-11-24 01:13:12.046510889 +0000 UTC))\\\\\\\"\\\\nF1124 01:13:12.046649 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcfcdeaa4c2ae4a6f4bf2d79a64607361b9798549e8910a7374e4e3d17a16ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.478731 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://311d898b91dec73c482063aca5a79fefbc55b705d41d4da5fbbc21d8972deac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.495793 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.509816 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.521357 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.537193 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.537658 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.537687 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.537698 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.537713 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.537723 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:46Z","lastTransitionTime":"2025-11-24T01:13:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.552157 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.562467 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"335c2304-248f-4c4e-8e4c-58ec5e76af4c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://288a11a39d941a7886acc9d7ee23935ca723a109dcb2b8f0743bb56e7d54a7e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://277775a9aa022595c0bd26119595cdf69b370e4851a05cab33b8ef4779923a78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beedc1a75d10e8bbe8f51adfca845b931ae288d59d89220ecc08bb65c425dcd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfc5ee83a76ce24ca8eab469ec28b319d9e69d9c7d8618f97a2b2a813022a97\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfc5ee83a76ce24ca8eab469ec28b319d9e69d9c7d8618f97a2b2a813022a97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.572087 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4407e6a61e6a16826e5087eda8f3f2904933c9fb540f0a3c03e5414188248657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bcaff046f33f0df74919d9673b7e12e0609ea03f7686c1806ffb3c351965195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.581466 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://519ff5cd20c605ee21f4c1651c932618d593cb5ecf336f3504ebe1bd0f35adbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.590818 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1962128-02a0-46c3-82c2-5055c2aed0b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a249831bc7bdc4396bd5498c176a52332946f791c39b1f90eea4c157e61fc60c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16a2280b76aa63e33f859193da8353df1f8a4014ef51c0ef6350beb5fb217245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h8xzm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.600503 4755 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-8pm69" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19dbf7ff-f684-4c57-803a-83b39e0705a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://090d3d275453f489254405c1197750888cc4d15d4251c6ce9b8a1a873319d5a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6k7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-8pm69\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.608076 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vzkz4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a85ee7-3417-491b-a375-99f140cfb5de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67684d7d9617d917019f391c4031994da8f4ff110a4fe3d77dfa1bf76378dec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tts5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vzkz4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.616566 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7wvfc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a32df1d1-89b4-4a22-a07f-2d7ecd2e265b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f7432e293e8131ca1bbbc7281982a639b41e995fc2210e6351a0e87cf719700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2pjf7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff945c5b4cb1534cb8e31d7e471ebe2f47fe04ea46f09fc53223a72f4c48de5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2pjf7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:27Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7wvfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 
01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.626519 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69352793-3db7-450c-98c2-5fd2040bb5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c1c133fe773aac40181aea15bd49ad2aee2f08caed8f7436c81ae3fc310300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b22922ca0ffb3c9c410a6ed82822ce5b3436d0e44993abe1fcb678959f243808\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5845caccbc89d64a7aaabedeec11a5d75fb7f25eb806cf8dceaf15263f30b4f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a9cb7e9aa093857c03bb4267ff3386e04e4de6e4025fdbfcb628db800d62745\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.637805 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27983572-2d9c-43d6-a7f0-445a0aec0531\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://612fe8e80230000f941c6c568234f32233a32e831a6d75556d9af68a1b790a7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zb6qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.639715 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.639753 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:46 crc 
kubenswrapper[4755]: I1124 01:13:46.639766 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.639785 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.639797 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:46Z","lastTransitionTime":"2025-11-24T01:13:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.647794 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9cl8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ccb86693-0b66-43ca-a2d1-e9594521d30f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl68c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl68c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:29Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9cl8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.665333 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b438206-d27d-46e5-a2b5-91397c0d8856\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7575abb359776b9b306d821357a1147cadfd41c99d9528e644a3c1fa046d1df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11946902b549b820749fb6127935c82cc1e840cd84834d1803ffc03003c967d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34675c12ebbb549e0db1190d1d99a23ccef6e4a97b884bcdafdc629204238c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25ec710b1a2a3c639ced06a73586f11fbc05067
1bc059fdcaf49714f153445d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64cdda2f5b59ddeadc36986c875cf28e18a7ed1ce1822dff6002724f7e558215\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.676813 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://311d898b91dec73c482063aca5a79fefbc55b705d41d4da5fbbc21d8972deac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.687786 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.696577 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dt8lz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58ea3cf6-1f18-428e-9b3f-6064671faf72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63e4ef9d52542d28688b962a86087f6b95a40625eefb13ad0460e2e728f5048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fc2wq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dt8lz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.713919 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://154995ba8ce124a48fc983de87e0fcbccc684c8bf36edf3b12565a113f82c8b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://154995ba8ce124a48fc983de87e0fcbccc684c8bf36edf3b12565a113f82c8b5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T01:13:44Z\\\",\\\"message\\\":\\\"ices.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1124 01:13:44.947473 6420 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 01:13:44.947487 6420 services_controller.go:452] Built service openshift-kube-apiserver/apiserver per-node LB for network=default: []services.LB{}\\\\nI1124 01:13:44.947496 6420 services_controller.go:453] Built service openshift-kube-apiserver/apiserver template LB for network=default: []services.LB{}\\\\nI1124 01:13:44.947495 6420 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:73135118-cf1b-4568-bd31-2f50308bf69d}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1124 01:13:44.946799 6420 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-4ngwk_openshift-ovn-kubernetes(b3b1d3cb-ffbd-4034-832d-6577ccf2f780)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4ngwk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.726655 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"103d5a7c-0ec9-4c03-9f86-03dc3e01665c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40df6848b15d905bccead2547cc1dec836a69e38e80cb65de5dbee812f0081cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89d05f3773b4861d3bb33d7e6a6f09baaedb7510ed0b15703495110923a2f790\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78a49336f91d7c8aeb0d2861d1228d7b8eb478290bdef3b0662e26f9e1cddd57\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cbe55a8cc684f932438f20f513d01400266c1c6ee3ded9c9ae1ea0b92aec7af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T01:13:12Z\\\",\\\"message\\\":\\\"GCM_SHA384' detected.\\\\nW1124 01:13:12.041119 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 01:13:12.041123 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 01:13:12.046149 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046144 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046262 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046271 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1124 01:13:12.046381 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1124 01:13:12.046396 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1124 01:13:12.046536 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\"\\\\nI1124 01:13:12.046556 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1763946791\\\\\\\\\\\\\\\" (2025-11-24 01:13:10 +0000 UTC to 2025-12-24 01:13:11 +0000 UTC (now=2025-11-24 01:13:12.046510889 +0000 UTC))\\\\\\\"\\\\nF1124 01:13:12.046649 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcfcdeaa4c2ae4a6f4bf2d79a64607361b9798549e8910a7374e4e3d17a16ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:46Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.742169 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.742208 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.742219 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.742257 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.742267 4755 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:46Z","lastTransitionTime":"2025-11-24T01:13:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.844574 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.844671 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.844697 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.844724 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.844744 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:46Z","lastTransitionTime":"2025-11-24T01:13:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.947582 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.947715 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.947733 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.947759 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.947775 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:46Z","lastTransitionTime":"2025-11-24T01:13:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.995926 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:13:46 crc kubenswrapper[4755]: I1124 01:13:46.995994 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:13:46 crc kubenswrapper[4755]: E1124 01:13:46.996051 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:13:46 crc kubenswrapper[4755]: E1124 01:13:46.996138 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.050359 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.050440 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.050454 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.050468 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.050479 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:47Z","lastTransitionTime":"2025-11-24T01:13:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.152859 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.152960 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.152980 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.153007 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.153025 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:47Z","lastTransitionTime":"2025-11-24T01:13:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.256071 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.256101 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.256109 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.256123 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.256133 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:47Z","lastTransitionTime":"2025-11-24T01:13:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.359042 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.359087 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.359098 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.359111 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.359121 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:47Z","lastTransitionTime":"2025-11-24T01:13:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.462124 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.462164 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.462173 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.462186 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.462194 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:47Z","lastTransitionTime":"2025-11-24T01:13:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.564066 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.564124 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.564140 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.564171 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.564187 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:47Z","lastTransitionTime":"2025-11-24T01:13:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.666391 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.666433 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.666442 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.666455 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.666465 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:47Z","lastTransitionTime":"2025-11-24T01:13:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.735386 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.735454 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:13:47 crc kubenswrapper[4755]: E1124 01:13:47.735679 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 01:13:47 crc kubenswrapper[4755]: E1124 01:13:47.735689 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 01:13:47 crc kubenswrapper[4755]: E1124 01:13:47.735708 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 24 01:13:47 crc kubenswrapper[4755]: E1124 01:13:47.735716 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 24 01:13:47 crc kubenswrapper[4755]: E1124 01:13:47.735722 4755 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 01:13:47 crc kubenswrapper[4755]: E1124 01:13:47.735732 4755 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 01:13:47 crc kubenswrapper[4755]: E1124 01:13:47.735786 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-24 01:14:19.735768675 +0000 UTC m=+84.421834186 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 01:13:47 crc kubenswrapper[4755]: E1124 01:13:47.735805 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-24 01:14:19.735796946 +0000 UTC m=+84.421862457 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.769296 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.769349 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.769361 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.769377 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.769390 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:47Z","lastTransitionTime":"2025-11-24T01:13:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.836371 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.836477 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.836536 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:13:47 crc kubenswrapper[4755]: E1124 01:13:47.836641 4755 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 01:13:47 crc kubenswrapper[4755]: E1124 01:13:47.836658 4755 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 01:13:47 crc kubenswrapper[4755]: E1124 01:13:47.836688 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:14:19.836661395 +0000 UTC m=+84.522726896 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:13:47 crc kubenswrapper[4755]: E1124 01:13:47.836733 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-24 01:14:19.836725326 +0000 UTC m=+84.522790827 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 01:13:47 crc kubenswrapper[4755]: E1124 01:13:47.836746 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-24 01:14:19.836740177 +0000 UTC m=+84.522805678 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.871923 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.871959 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.871970 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.871986 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.871996 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:47Z","lastTransitionTime":"2025-11-24T01:13:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.974664 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.974736 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.974752 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.974778 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.974796 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:47Z","lastTransitionTime":"2025-11-24T01:13:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.996567 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:13:47 crc kubenswrapper[4755]: E1124 01:13:47.996718 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:13:47 crc kubenswrapper[4755]: I1124 01:13:47.997023 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:13:47 crc kubenswrapper[4755]: E1124 01:13:47.997094 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:13:48 crc kubenswrapper[4755]: I1124 01:13:48.077075 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:48 crc kubenswrapper[4755]: I1124 01:13:48.077117 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:48 crc kubenswrapper[4755]: I1124 01:13:48.077129 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:48 crc kubenswrapper[4755]: I1124 01:13:48.077145 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:48 crc kubenswrapper[4755]: I1124 01:13:48.077157 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:48Z","lastTransitionTime":"2025-11-24T01:13:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:48 crc kubenswrapper[4755]: I1124 01:13:48.180378 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:48 crc kubenswrapper[4755]: I1124 01:13:48.180452 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:48 crc kubenswrapper[4755]: I1124 01:13:48.180473 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:48 crc kubenswrapper[4755]: I1124 01:13:48.180501 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:48 crc kubenswrapper[4755]: I1124 01:13:48.180526 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:48Z","lastTransitionTime":"2025-11-24T01:13:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:48 crc kubenswrapper[4755]: I1124 01:13:48.283119 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:48 crc kubenswrapper[4755]: I1124 01:13:48.283153 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:48 crc kubenswrapper[4755]: I1124 01:13:48.283163 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:48 crc kubenswrapper[4755]: I1124 01:13:48.283178 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:48 crc kubenswrapper[4755]: I1124 01:13:48.283187 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:48Z","lastTransitionTime":"2025-11-24T01:13:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:48 crc kubenswrapper[4755]: I1124 01:13:48.385427 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:48 crc kubenswrapper[4755]: I1124 01:13:48.385568 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:48 crc kubenswrapper[4755]: I1124 01:13:48.385583 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:48 crc kubenswrapper[4755]: I1124 01:13:48.385631 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:48 crc kubenswrapper[4755]: I1124 01:13:48.385648 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:48Z","lastTransitionTime":"2025-11-24T01:13:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:48 crc kubenswrapper[4755]: I1124 01:13:48.488294 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:48 crc kubenswrapper[4755]: I1124 01:13:48.488385 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:48 crc kubenswrapper[4755]: I1124 01:13:48.488439 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:48 crc kubenswrapper[4755]: I1124 01:13:48.488473 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:48 crc kubenswrapper[4755]: I1124 01:13:48.488493 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:48Z","lastTransitionTime":"2025-11-24T01:13:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:48 crc kubenswrapper[4755]: I1124 01:13:48.590696 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:48 crc kubenswrapper[4755]: I1124 01:13:48.590760 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:48 crc kubenswrapper[4755]: I1124 01:13:48.590778 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:48 crc kubenswrapper[4755]: I1124 01:13:48.590802 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:48 crc kubenswrapper[4755]: I1124 01:13:48.590819 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:48Z","lastTransitionTime":"2025-11-24T01:13:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:48 crc kubenswrapper[4755]: I1124 01:13:48.694003 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:48 crc kubenswrapper[4755]: I1124 01:13:48.694141 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:48 crc kubenswrapper[4755]: I1124 01:13:48.694168 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:48 crc kubenswrapper[4755]: I1124 01:13:48.694195 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:48 crc kubenswrapper[4755]: I1124 01:13:48.694217 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:48Z","lastTransitionTime":"2025-11-24T01:13:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:48 crc kubenswrapper[4755]: I1124 01:13:48.797311 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:48 crc kubenswrapper[4755]: I1124 01:13:48.797380 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:48 crc kubenswrapper[4755]: I1124 01:13:48.797390 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:48 crc kubenswrapper[4755]: I1124 01:13:48.797403 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:48 crc kubenswrapper[4755]: I1124 01:13:48.797412 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:48Z","lastTransitionTime":"2025-11-24T01:13:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:48 crc kubenswrapper[4755]: I1124 01:13:48.899997 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:48 crc kubenswrapper[4755]: I1124 01:13:48.900080 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:48 crc kubenswrapper[4755]: I1124 01:13:48.900093 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:48 crc kubenswrapper[4755]: I1124 01:13:48.900112 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:48 crc kubenswrapper[4755]: I1124 01:13:48.900122 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:48Z","lastTransitionTime":"2025-11-24T01:13:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:48 crc kubenswrapper[4755]: I1124 01:13:48.995746 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:13:48 crc kubenswrapper[4755]: I1124 01:13:48.995823 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:13:48 crc kubenswrapper[4755]: E1124 01:13:48.995890 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:13:48 crc kubenswrapper[4755]: E1124 01:13:48.996037 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.002319 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.002370 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.002386 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.002409 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.002424 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:49Z","lastTransitionTime":"2025-11-24T01:13:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.104805 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.104836 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.104862 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.104874 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.104884 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:49Z","lastTransitionTime":"2025-11-24T01:13:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.207069 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.207104 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.207112 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.207130 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.207143 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:49Z","lastTransitionTime":"2025-11-24T01:13:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.309010 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.309081 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.309098 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.309174 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.309187 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:49Z","lastTransitionTime":"2025-11-24T01:13:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.411586 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.411669 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.411682 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.411699 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.411709 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:49Z","lastTransitionTime":"2025-11-24T01:13:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.514664 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.514720 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.514735 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.514759 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.514773 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:49Z","lastTransitionTime":"2025-11-24T01:13:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.617725 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.617779 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.617790 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.617811 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.617823 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:49Z","lastTransitionTime":"2025-11-24T01:13:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.720845 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.720904 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.720918 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.720940 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.720956 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:49Z","lastTransitionTime":"2025-11-24T01:13:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.824118 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.824180 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.824199 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.824224 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.824242 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:49Z","lastTransitionTime":"2025-11-24T01:13:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.932339 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.932398 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.932418 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.932449 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.932718 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:49Z","lastTransitionTime":"2025-11-24T01:13:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.996226 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:13:49 crc kubenswrapper[4755]: I1124 01:13:49.996336 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:13:49 crc kubenswrapper[4755]: E1124 01:13:49.996402 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:13:49 crc kubenswrapper[4755]: E1124 01:13:49.996497 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.035775 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.035804 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.035812 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.035824 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.035832 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:50Z","lastTransitionTime":"2025-11-24T01:13:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.138181 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.138234 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.138245 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.138265 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.138277 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:50Z","lastTransitionTime":"2025-11-24T01:13:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.241041 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.241070 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.241078 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.241097 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.241115 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:50Z","lastTransitionTime":"2025-11-24T01:13:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.342994 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.343027 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.343036 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.343048 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.343057 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:50Z","lastTransitionTime":"2025-11-24T01:13:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.445546 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.445594 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.445628 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.445645 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.445655 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:50Z","lastTransitionTime":"2025-11-24T01:13:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.535438 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.549179 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69352793-3db7-450c-98c2-5fd2040bb5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c1c133fe773aac40181aea15bd49ad2aee2f08caed8f7436c81ae3fc310300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b22922ca0ffb3c9c410a6ed82822ce5b3436d0e44993abe1fcb678959f243808\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5845caccbc89d64a7aaabedeec11a5d75fb7f25eb806cf8dceaf15263f30b4f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\
":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a9cb7e9aa093857c03bb4267ff3386e04e4de6e4025fdbfcb628db800d62745\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:50Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.551543 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.551813 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.551951 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.552094 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.552228 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:50Z","lastTransitionTime":"2025-11-24T01:13:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.566294 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27983572-2d9c-43d6-a7f0-445a0aec0531\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://612fe8e80230000f941c6c568234f32233a32e831a6d75556d9af68a1b790a7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zb6qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:50Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.576763 4755 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/network-metrics-daemon-9cl8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ccb86693-0b66-43ca-a2d1-e9594521d30f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl68c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl68c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:29Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9cl8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:50Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.593334 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b438206-d27d-46e5-a2b5-91397c0d8856\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7575abb359776b9b306d821357a1147cadfd41c99d9528e644a3c1fa046d1df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11946902b549b820749fb6127935c82cc1e840cd84834d1803ffc03003c967d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34675c12ebbb549e0db1190d1d99a23ccef6e4a97b884bcdafdc629204238c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25ec710b1a2a3c639ced06a73586f11fbc05067
1bc059fdcaf49714f153445d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64cdda2f5b59ddeadc36986c875cf28e18a7ed1ce1822dff6002724f7e558215\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:50Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.604484 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://311d898b91dec73c482063aca5a79fefbc55b705d41d4da5fbbc21d8972deac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:50Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.620393 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:50Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.633507 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dt8lz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58ea3cf6-1f18-428e-9b3f-6064671faf72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63e4ef9d52542d28688b962a86087f6b95a40625eefb13ad0460e2e728f5048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fc2wq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dt8lz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:50Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.652992 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://154995ba8ce124a48fc983de87e0fcbccc684c8bf36edf3b12565a113f82c8b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://154995ba8ce124a48fc983de87e0fcbccc684c8bf36edf3b12565a113f82c8b5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T01:13:44Z\\\",\\\"message\\\":\\\"ices.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1124 01:13:44.947473 6420 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 01:13:44.947487 6420 services_controller.go:452] Built service openshift-kube-apiserver/apiserver per-node LB for network=default: []services.LB{}\\\\nI1124 01:13:44.947496 6420 services_controller.go:453] Built service openshift-kube-apiserver/apiserver template LB for network=default: []services.LB{}\\\\nI1124 01:13:44.947495 6420 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:73135118-cf1b-4568-bd31-2f50308bf69d}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1124 01:13:44.946799 6420 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-4ngwk_openshift-ovn-kubernetes(b3b1d3cb-ffbd-4034-832d-6577ccf2f780)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4ngwk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:50Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.654483 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.654539 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.654556 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.654576 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.654588 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:50Z","lastTransitionTime":"2025-11-24T01:13:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.669721 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"103d5a7c-0ec9-4c03-9f86-03dc3e01665c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40df6848b15d905bccead2547cc1dec836a69e38e80cb65de5dbee812f0081cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89d05f3773b4861d3bb33d7e6a6f09baaedb7510ed0b15703495110923a2f790\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78a49336f91d7c8aeb0d2861d1228d7b8eb478290bdef3b0662e26f9e1cddd57\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cbe55a8cc684f932438f20f513d01400266c1c6ee3ded9c9ae1ea0b92aec7af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T01:13:12Z\\\",\\\"message\\\":\\\"GCM_SHA384' detected.\\\\nW1124 01:13:12.041119 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 01:13:12.041123 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 01:13:12.046149 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046144 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046262 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046271 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1124 01:13:12.046381 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1124 01:13:12.046396 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1124 01:13:12.046536 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\"\\\\nI1124 01:13:12.046556 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1763946791\\\\\\\\\\\\\\\" (2025-11-24 01:13:10 +0000 UTC to 2025-12-24 01:13:11 +0000 UTC (now=2025-11-24 01:13:12.046510889 +0000 UTC))\\\\\\\"\\\\nF1124 01:13:12.046649 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcfcdeaa4c2ae4a6f4bf2d79a64607361b9798549e8910a7374e4e3d17a16ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:50Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.680253 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:50Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.690796 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:50Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.703099 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"335c2304-248f-4c4e-8e4c-58ec5e76af4c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://288a11a39d941a7886acc9d7ee23935ca723a109dcb2b8f0743bb56e7d54a7e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://277775a9aa022595c0bd26119595cdf69b370e4851a05cab33b8ef4779923a78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}
,{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beedc1a75d10e8bbe8f51adfca845b931ae288d59d89220ecc08bb65c425dcd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfc5ee83a76ce24ca8eab469ec28b319d9e69d9c7d8618f97a2b2a813022a97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfc5ee83a76ce24ca8eab469ec28b319d9e69d9c7d8618f97a2b2a813022a97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:50Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.715039 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4407e6a61e6a16826e5087eda8f3f2904933c9fb540f0a3c03e5414188248657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bcaff046f33f0df74919d9673b7e12e0609ea03f7686c1806ffb3c351965195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:50Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.727530 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://519ff5cd20c605ee21f4c1651c932618d593cb5ecf336f3504ebe1bd0f35adbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:50Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.737452 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1962128-02a0-46c3-82c2-5055c2aed0b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a249831bc7bdc4396bd5498c176a52332946f791c39b1f90eea4c157e61fc60c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16a2280b76aa63e33f859193da8353df1f8a4014ef51c0ef6350beb5fb217245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h8xzm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:50Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.749012 4755 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-8pm69" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19dbf7ff-f684-4c57-803a-83b39e0705a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://090d3d275453f489254405c1197750888cc4d15d4251c6ce9b8a1a873319d5a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6k7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-8pm69\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:50Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.756461 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.756501 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.756512 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.756526 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.756538 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:50Z","lastTransitionTime":"2025-11-24T01:13:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.758822 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vzkz4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a85ee7-3417-491b-a375-99f140cfb5de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67684d7d9617d917019f391c4031994da8f4ff110a4fe3d77dfa1bf76378dec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acce
ss-7tts5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vzkz4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:50Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.770379 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7wvfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a32df1d1-89b4-4a22-a07f-2d7ecd2e265b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f7432e293e8131ca1bbbc7281982a639b41e995fc2210e6351a0e87cf719700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2pjf7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff945c5b4cb1534cb8e31d7e471ebe2f47fe04ea46f09fc53223a72f4c48de5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ov
nkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2pjf7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:27Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7wvfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:50Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.858594 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.858646 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.858657 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.858672 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.858682 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:50Z","lastTransitionTime":"2025-11-24T01:13:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.961895 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.961953 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.961970 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.961997 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.962014 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:50Z","lastTransitionTime":"2025-11-24T01:13:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.995587 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:13:50 crc kubenswrapper[4755]: I1124 01:13:50.995587 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:13:50 crc kubenswrapper[4755]: E1124 01:13:50.995823 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:13:50 crc kubenswrapper[4755]: E1124 01:13:50.995881 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.064841 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.064935 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.064964 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.064993 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.065017 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:51Z","lastTransitionTime":"2025-11-24T01:13:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.168494 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.168553 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.168566 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.168585 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.168598 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:51Z","lastTransitionTime":"2025-11-24T01:13:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.271884 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.271969 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.272002 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.272032 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.272052 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:51Z","lastTransitionTime":"2025-11-24T01:13:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.374770 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.374838 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.374856 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.374881 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.374899 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:51Z","lastTransitionTime":"2025-11-24T01:13:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.478142 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.478195 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.478216 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.478237 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.478251 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:51Z","lastTransitionTime":"2025-11-24T01:13:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.581180 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.581246 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.581270 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.581300 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.581319 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:51Z","lastTransitionTime":"2025-11-24T01:13:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.683172 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.683214 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.683226 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.683239 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.683252 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:51Z","lastTransitionTime":"2025-11-24T01:13:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.785050 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.785078 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.785121 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.785142 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.785154 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:51Z","lastTransitionTime":"2025-11-24T01:13:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.888165 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.888208 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.888252 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.888270 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.888285 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:51Z","lastTransitionTime":"2025-11-24T01:13:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.990500 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.990568 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.990586 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.990641 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.990660 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:51Z","lastTransitionTime":"2025-11-24T01:13:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.995791 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:13:51 crc kubenswrapper[4755]: E1124 01:13:51.995953 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:13:51 crc kubenswrapper[4755]: I1124 01:13:51.995971 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:13:51 crc kubenswrapper[4755]: E1124 01:13:51.996068 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:13:52 crc kubenswrapper[4755]: I1124 01:13:52.094303 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:52 crc kubenswrapper[4755]: I1124 01:13:52.094364 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:52 crc kubenswrapper[4755]: I1124 01:13:52.094381 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:52 crc kubenswrapper[4755]: I1124 01:13:52.094402 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:52 crc kubenswrapper[4755]: I1124 01:13:52.094416 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:52Z","lastTransitionTime":"2025-11-24T01:13:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:52 crc kubenswrapper[4755]: I1124 01:13:52.196446 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:52 crc kubenswrapper[4755]: I1124 01:13:52.196494 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:52 crc kubenswrapper[4755]: I1124 01:13:52.196502 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:52 crc kubenswrapper[4755]: I1124 01:13:52.196517 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:52 crc kubenswrapper[4755]: I1124 01:13:52.196526 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:52Z","lastTransitionTime":"2025-11-24T01:13:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:52 crc kubenswrapper[4755]: I1124 01:13:52.299242 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:52 crc kubenswrapper[4755]: I1124 01:13:52.299303 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:52 crc kubenswrapper[4755]: I1124 01:13:52.299320 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:52 crc kubenswrapper[4755]: I1124 01:13:52.299344 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:52 crc kubenswrapper[4755]: I1124 01:13:52.299364 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:52Z","lastTransitionTime":"2025-11-24T01:13:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:52 crc kubenswrapper[4755]: I1124 01:13:52.402799 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:52 crc kubenswrapper[4755]: I1124 01:13:52.402854 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:52 crc kubenswrapper[4755]: I1124 01:13:52.402876 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:52 crc kubenswrapper[4755]: I1124 01:13:52.402904 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:52 crc kubenswrapper[4755]: I1124 01:13:52.402943 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:52Z","lastTransitionTime":"2025-11-24T01:13:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:52 crc kubenswrapper[4755]: I1124 01:13:52.505918 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:52 crc kubenswrapper[4755]: I1124 01:13:52.505967 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:52 crc kubenswrapper[4755]: I1124 01:13:52.505981 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:52 crc kubenswrapper[4755]: I1124 01:13:52.505999 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:52 crc kubenswrapper[4755]: I1124 01:13:52.506010 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:52Z","lastTransitionTime":"2025-11-24T01:13:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:52 crc kubenswrapper[4755]: I1124 01:13:52.608903 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:52 crc kubenswrapper[4755]: I1124 01:13:52.608972 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:52 crc kubenswrapper[4755]: I1124 01:13:52.608992 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:52 crc kubenswrapper[4755]: I1124 01:13:52.609015 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:52 crc kubenswrapper[4755]: I1124 01:13:52.609034 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:52Z","lastTransitionTime":"2025-11-24T01:13:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:52 crc kubenswrapper[4755]: I1124 01:13:52.711790 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:52 crc kubenswrapper[4755]: I1124 01:13:52.711840 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:52 crc kubenswrapper[4755]: I1124 01:13:52.711851 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:52 crc kubenswrapper[4755]: I1124 01:13:52.711871 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:52 crc kubenswrapper[4755]: I1124 01:13:52.711888 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:52Z","lastTransitionTime":"2025-11-24T01:13:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:52 crc kubenswrapper[4755]: I1124 01:13:52.814116 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:52 crc kubenswrapper[4755]: I1124 01:13:52.814162 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:52 crc kubenswrapper[4755]: I1124 01:13:52.814175 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:52 crc kubenswrapper[4755]: I1124 01:13:52.814191 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:52 crc kubenswrapper[4755]: I1124 01:13:52.814204 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:52Z","lastTransitionTime":"2025-11-24T01:13:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:52 crc kubenswrapper[4755]: I1124 01:13:52.917282 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:52 crc kubenswrapper[4755]: I1124 01:13:52.917347 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:52 crc kubenswrapper[4755]: I1124 01:13:52.917363 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:52 crc kubenswrapper[4755]: I1124 01:13:52.917388 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:52 crc kubenswrapper[4755]: I1124 01:13:52.917405 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:52Z","lastTransitionTime":"2025-11-24T01:13:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:52 crc kubenswrapper[4755]: I1124 01:13:52.995764 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:13:52 crc kubenswrapper[4755]: I1124 01:13:52.995906 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:13:52 crc kubenswrapper[4755]: E1124 01:13:52.996027 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:13:52 crc kubenswrapper[4755]: E1124 01:13:52.996194 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.019541 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.019643 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.019659 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.019680 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.019694 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:53Z","lastTransitionTime":"2025-11-24T01:13:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.123200 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.123270 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.123296 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.123324 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.123349 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:53Z","lastTransitionTime":"2025-11-24T01:13:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.226694 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.226758 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.226775 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.226806 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.226826 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:53Z","lastTransitionTime":"2025-11-24T01:13:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.329323 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.329390 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.329411 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.329440 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.329462 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:53Z","lastTransitionTime":"2025-11-24T01:13:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.432342 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.432384 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.432395 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.432417 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.432432 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:53Z","lastTransitionTime":"2025-11-24T01:13:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.535443 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.535522 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.535549 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.535576 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.535591 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:53Z","lastTransitionTime":"2025-11-24T01:13:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.638352 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.638419 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.638437 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.638462 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.638480 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:53Z","lastTransitionTime":"2025-11-24T01:13:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.740979 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.741038 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.741055 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.741081 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.741101 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:53Z","lastTransitionTime":"2025-11-24T01:13:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.844024 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.844082 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.844092 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.844110 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.844121 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:53Z","lastTransitionTime":"2025-11-24T01:13:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.946573 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.946787 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.946846 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.946866 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.946877 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:53Z","lastTransitionTime":"2025-11-24T01:13:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.995916 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:13:53 crc kubenswrapper[4755]: I1124 01:13:53.995911 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:13:53 crc kubenswrapper[4755]: E1124 01:13:53.996079 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:13:53 crc kubenswrapper[4755]: E1124 01:13:53.996145 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.049788 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.049854 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.049876 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.049911 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.049947 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:54Z","lastTransitionTime":"2025-11-24T01:13:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.157182 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.157282 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.157337 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.157365 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.157382 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:54Z","lastTransitionTime":"2025-11-24T01:13:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.260460 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.260506 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.260515 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.260533 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.260543 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:54Z","lastTransitionTime":"2025-11-24T01:13:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.364323 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.364384 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.364406 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.364435 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.364455 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:54Z","lastTransitionTime":"2025-11-24T01:13:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.467874 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.467940 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.467962 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.467989 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.468010 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:54Z","lastTransitionTime":"2025-11-24T01:13:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.571089 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.571156 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.571173 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.571195 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.571209 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:54Z","lastTransitionTime":"2025-11-24T01:13:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.674258 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.674302 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.674315 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.674337 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.674350 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:54Z","lastTransitionTime":"2025-11-24T01:13:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.777003 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.777056 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.777079 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.777099 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.777113 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:54Z","lastTransitionTime":"2025-11-24T01:13:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.879517 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.879576 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.879636 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.879668 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.879690 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:54Z","lastTransitionTime":"2025-11-24T01:13:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.983310 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.983444 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.983463 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.983518 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.983722 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:54Z","lastTransitionTime":"2025-11-24T01:13:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.996159 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:13:54 crc kubenswrapper[4755]: I1124 01:13:54.996268 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:13:54 crc kubenswrapper[4755]: E1124 01:13:54.996296 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:13:54 crc kubenswrapper[4755]: E1124 01:13:54.996499 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:13:55 crc kubenswrapper[4755]: I1124 01:13:55.086273 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:55 crc kubenswrapper[4755]: I1124 01:13:55.086373 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:55 crc kubenswrapper[4755]: I1124 01:13:55.086391 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:55 crc kubenswrapper[4755]: I1124 01:13:55.086415 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:55 crc kubenswrapper[4755]: I1124 01:13:55.086432 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:55Z","lastTransitionTime":"2025-11-24T01:13:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:55 crc kubenswrapper[4755]: I1124 01:13:55.188782 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:55 crc kubenswrapper[4755]: I1124 01:13:55.188838 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:55 crc kubenswrapper[4755]: I1124 01:13:55.188849 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:55 crc kubenswrapper[4755]: I1124 01:13:55.188867 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:55 crc kubenswrapper[4755]: I1124 01:13:55.188878 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:55Z","lastTransitionTime":"2025-11-24T01:13:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:55 crc kubenswrapper[4755]: I1124 01:13:55.291558 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:55 crc kubenswrapper[4755]: I1124 01:13:55.291658 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:55 crc kubenswrapper[4755]: I1124 01:13:55.291682 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:55 crc kubenswrapper[4755]: I1124 01:13:55.291709 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:55 crc kubenswrapper[4755]: I1124 01:13:55.291729 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:55Z","lastTransitionTime":"2025-11-24T01:13:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:55 crc kubenswrapper[4755]: I1124 01:13:55.394711 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:55 crc kubenswrapper[4755]: I1124 01:13:55.394770 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:55 crc kubenswrapper[4755]: I1124 01:13:55.394793 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:55 crc kubenswrapper[4755]: I1124 01:13:55.394823 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:55 crc kubenswrapper[4755]: I1124 01:13:55.394844 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:55Z","lastTransitionTime":"2025-11-24T01:13:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:55 crc kubenswrapper[4755]: I1124 01:13:55.497844 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:55 crc kubenswrapper[4755]: I1124 01:13:55.497907 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:55 crc kubenswrapper[4755]: I1124 01:13:55.497930 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:55 crc kubenswrapper[4755]: I1124 01:13:55.497959 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:55 crc kubenswrapper[4755]: I1124 01:13:55.497980 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:55Z","lastTransitionTime":"2025-11-24T01:13:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:55 crc kubenswrapper[4755]: I1124 01:13:55.601448 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:55 crc kubenswrapper[4755]: I1124 01:13:55.601516 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:55 crc kubenswrapper[4755]: I1124 01:13:55.601534 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:55 crc kubenswrapper[4755]: I1124 01:13:55.601561 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:55 crc kubenswrapper[4755]: I1124 01:13:55.601580 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:55Z","lastTransitionTime":"2025-11-24T01:13:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:55 crc kubenswrapper[4755]: I1124 01:13:55.704546 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:55 crc kubenswrapper[4755]: I1124 01:13:55.704655 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:55 crc kubenswrapper[4755]: I1124 01:13:55.704686 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:55 crc kubenswrapper[4755]: I1124 01:13:55.704722 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:55 crc kubenswrapper[4755]: I1124 01:13:55.704744 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:55Z","lastTransitionTime":"2025-11-24T01:13:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:55 crc kubenswrapper[4755]: I1124 01:13:55.807182 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:55 crc kubenswrapper[4755]: I1124 01:13:55.807238 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:55 crc kubenswrapper[4755]: I1124 01:13:55.807254 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:55 crc kubenswrapper[4755]: I1124 01:13:55.807276 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:55 crc kubenswrapper[4755]: I1124 01:13:55.807293 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:55Z","lastTransitionTime":"2025-11-24T01:13:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:55 crc kubenswrapper[4755]: I1124 01:13:55.910422 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:55 crc kubenswrapper[4755]: I1124 01:13:55.910461 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:55 crc kubenswrapper[4755]: I1124 01:13:55.910469 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:55 crc kubenswrapper[4755]: I1124 01:13:55.910484 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:55 crc kubenswrapper[4755]: I1124 01:13:55.910494 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:55Z","lastTransitionTime":"2025-11-24T01:13:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:55 crc kubenswrapper[4755]: I1124 01:13:55.996976 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:13:55 crc kubenswrapper[4755]: I1124 01:13:55.996995 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:13:55 crc kubenswrapper[4755]: E1124 01:13:55.997169 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:13:55 crc kubenswrapper[4755]: E1124 01:13:55.997378 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.012221 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7wvfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a32df1d1-89b4-4a22-a07f-2d7ecd2e265b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f7432e293e8131ca1bbbc7281982a639b41e995fc2210e6351a0e87cf719700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2pjf7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff945c5b4cb1534cb8e31d7e471ebe2f47fe04ea46f09fc53223a72f4c48de5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2pjf7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:27Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7wvfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:56Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.012434 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.012472 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.012484 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.012500 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.012512 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:56Z","lastTransitionTime":"2025-11-24T01:13:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.024149 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"335c2304-248f-4c4e-8e4c-58ec5e76af4c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://288a11a39d941a7886acc9d7ee23935ca723a109dcb2b8f0743bb56e7d54a7e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://277775a9aa022595c0bd26119595cdf69b370e4851a05cab33b8ef4779923a78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beedc1a75d10e8bbe8f51adfca845b931ae288d59d89220ecc08bb65c425dcd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfc5ee83a76ce24ca8eab469ec28b319d9e69d9c7d8618f97a2b2a813022a97\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfc5ee83a76ce24ca8eab469ec28b319d9e69d9c7d8618f97a2b2a813022a97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:56Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.025788 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.025860 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.025872 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.025914 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.025927 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:56Z","lastTransitionTime":"2025-11-24T01:13:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:56 crc kubenswrapper[4755]: E1124 01:13:56.038542 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"50bb41a3-bb20-461c-ba4c-72998ece87bc\\\",\\\"systemUUID\\\":\\\"cb6dbfa2-ee9a-4406-af65-1558b4c6cb25\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:56Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.040381 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4407e6a61e6a16826e5087eda8f3f2904933c9fb540f0a3c03e5414188248657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bcaff046f33f0df74919d9673b7e12e0609ea03f7686c1806ffb3c351965195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:56Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.041678 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.041713 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.041723 4755 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.041763 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.041778 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:56Z","lastTransitionTime":"2025-11-24T01:13:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:56 crc kubenswrapper[4755]: E1124 01:13:56.052351 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"50bb41a3-bb20-461c-ba4c-72998ece87bc\\\",\\\"systemUUID\\\":\\\"cb6dbfa2-ee9a-4406-af65-1558b4c6cb25\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:56Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.052735 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://519ff5cd20c605ee21f4c1651c932618d593cb5ecf336f3504ebe1bd0f35adbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:56Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.055196 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.055223 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.055248 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.055260 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.055269 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:56Z","lastTransitionTime":"2025-11-24T01:13:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:56 crc kubenswrapper[4755]: E1124 01:13:56.065555 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"50bb41a3-bb20-461c-ba4c-72998ece87bc\\\",\\\"systemUUID\\\":\\\"cb6dbfa2-ee9a-4406-af65-1558b4c6cb25\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:56Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.065724 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1962128-02a0-46c3-82c2-5055c2aed0b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a249831bc7bdc4396bd5498c176a52332946f791c39b1f90eea4c157e61fc60c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16a2280b76aa63e33f859193da8353df1f8a4014ef51c0ef6350beb5fb217245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h8xzm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:56Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.075510 4755 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.075540 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.075548 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.075562 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.075594 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:56Z","lastTransitionTime":"2025-11-24T01:13:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.082921 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8pm69" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19dbf7ff-f684-4c57-803a-83b39e0705a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://090d3d275453f489254405c1197750888cc4d15d4251c6ce9b8a1a873319d5a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin
\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6k7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8pm69\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:56Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:56 crc kubenswrapper[4755]: E1124 01:13:56.108459 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"50bb41a3-bb20-461c-ba4c-72998ece87bc\\\",\\\"systemUUID\\\":\\\"cb6dbfa2-ee9a-4406-af65-1558b4c6cb25\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:56Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.113792 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.113839 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.113884 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.113906 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.113922 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:56Z","lastTransitionTime":"2025-11-24T01:13:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.117962 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vzkz4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a85ee7-3417-491b-a375-99f140cfb5de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67684d7d9617d917019f391c4031994da8f4ff110a4fe3d77dfa1bf76378dec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tts5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vzkz4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-11-24T01:13:56Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:56 crc kubenswrapper[4755]: E1124 01:13:56.126923 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:13:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"50bb41a3-bb20-461c-ba4c-72998ece87bc\\\",\\\"systemUUID\\\":\\\"cb6dbfa2-ee9a-4406-af65-1558b4c6cb25\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:56Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:56 crc kubenswrapper[4755]: E1124 01:13:56.127076 4755 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.128236 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.128286 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.128312 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.128326 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.128336 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:56Z","lastTransitionTime":"2025-11-24T01:13:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.139227 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b438206-d27d-46e5-a2b5-91397c0d8856\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7575abb359776b9b306d821357a1147cadfd41c99d9528e644a3c1fa046d1df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11946902b549b820749fb6127935c82cc1e840cd84834d1803ffc03003c967d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"sta
rted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34675c12ebbb549e0db1190d1d99a23ccef6e4a97b884bcdafdc629204238c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25ec710b1a2a3c639ced06a73586f11fbc050671bc059fdcaf49714f153445d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64cdda2f5b59ddeadc36986c875cf28e18a7ed1ce1822dff6002724f7e558215\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":
\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:56Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.154038 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"69352793-3db7-450c-98c2-5fd2040bb5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c1c133fe773aac40181aea15bd49ad2aee2f08caed8f7436c81ae3fc310300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b22922ca0ffb3c9c410a6ed82822ce5b3436d0e44993abe1fcb678959f243808\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5845caccbc89d64a7aaabedeec11a5d75fb7f25eb806cf8dceaf15263f30b4f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a9cb7e9aa093857c03bb4267ff3386e04e4de6e4025fdbfcb628db800d62745\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:56Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.167800 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27983572-2d9c-43d6-a7f0-445a0aec0531\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://612fe8e80230000f941c6c568234f32233a32e831a6d75556d9af68a1b790a7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e878
2bb4061601c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-b
inary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termin
ated\\\":{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zb6qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:56Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.177902 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9cl8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ccb86693-0b66-43ca-a2d1-e9594521d30f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl68c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl68c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:29Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9cl8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:56Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.190765 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"103d5a7c-0ec9-4c03-9f86-03dc3e01665c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40df6848b15d905bccead2547cc1dec836a69e38e80cb65de5dbee812f0081cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89d05f3773b4861d3bb33d7e6a6f09baaedb7510ed0b15703495110923a2f790\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78a49336f91d7c8aeb0d2861d1228d7b8eb478290bdef3b0662e26f9e1cddd57\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cbe55a8cc684f932438f20f513d01400266c1c6ee3ded9c9ae1ea0b92aec7af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T01:13:12Z\\\",\\\"message\\\":\\\"GCM_SHA384' detected.\\\\nW1124 01:13:12.041119 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 01:13:12.041123 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 01:13:12.046149 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046144 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046262 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046271 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1124 01:13:12.046381 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1124 01:13:12.046396 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1124 01:13:12.046536 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\"\\\\nI1124 01:13:12.046556 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1763946791\\\\\\\\\\\\\\\" (2025-11-24 01:13:10 +0000 UTC to 2025-12-24 01:13:11 +0000 UTC (now=2025-11-24 01:13:12.046510889 +0000 UTC))\\\\\\\"\\\\nF1124 01:13:12.046649 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcfcdeaa4c2ae4a6f4bf2d79a64607361b9798549e8910a7374e4e3d17a16ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:56Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.201529 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://311d898b91dec73c482063aca5a79fefbc55b705d41d4da5fbbc21d8972deac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:56Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.211304 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:56Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.219759 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dt8lz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58ea3cf6-1f18-428e-9b3f-6064671faf72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63e4ef9d52542d28688b962a86087f6b95a40625eefb13ad0460e2e728f5048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fc2wq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dt8lz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:56Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.230509 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.230539 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.230548 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.230578 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.230590 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:56Z","lastTransitionTime":"2025-11-24T01:13:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.236080 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://154995ba8ce124a48fc983de87e0fcbccc684c8b
f36edf3b12565a113f82c8b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://154995ba8ce124a48fc983de87e0fcbccc684c8bf36edf3b12565a113f82c8b5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T01:13:44Z\\\",\\\"message\\\":\\\"ices.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1124 01:13:44.947473 6420 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 01:13:44.947487 6420 services_controller.go:452] Built service openshift-kube-apiserver/apiserver per-node LB for network=default: []services.LB{}\\\\nI1124 01:13:44.947496 6420 services_controller.go:453] Built service openshift-kube-apiserver/apiserver template LB for network=default: []services.LB{}\\\\nI1124 01:13:44.947495 6420 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:73135118-cf1b-4568-bd31-2f50308bf69d}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1124 01:13:44.946799 6420 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-4ngwk_openshift-ovn-kubernetes(b3b1d3cb-ffbd-4034-832d-6577ccf2f780)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4ngwk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:56Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.248578 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:56Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.259675 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:13:56Z is after 2025-08-24T17:21:41Z" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.332474 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.332504 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.332513 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.332527 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.332536 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:56Z","lastTransitionTime":"2025-11-24T01:13:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.435629 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.436025 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.436042 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.436063 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.436079 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:56Z","lastTransitionTime":"2025-11-24T01:13:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.538782 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.538927 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.538945 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.539068 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.539087 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:56Z","lastTransitionTime":"2025-11-24T01:13:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.641473 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.641519 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.641529 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.641543 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.641552 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:56Z","lastTransitionTime":"2025-11-24T01:13:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.743675 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.743720 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.743731 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.743749 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.743762 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:56Z","lastTransitionTime":"2025-11-24T01:13:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.847028 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.847075 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.847090 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.847107 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.847120 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:56Z","lastTransitionTime":"2025-11-24T01:13:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.949741 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.949798 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.949814 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.949837 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.949853 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:56Z","lastTransitionTime":"2025-11-24T01:13:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.995997 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:13:56 crc kubenswrapper[4755]: I1124 01:13:56.996042 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:13:56 crc kubenswrapper[4755]: E1124 01:13:56.996184 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:13:56 crc kubenswrapper[4755]: E1124 01:13:56.996285 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.052919 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.052999 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.053045 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.053071 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.053094 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:57Z","lastTransitionTime":"2025-11-24T01:13:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.156148 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.156240 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.156259 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.156311 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.156331 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:57Z","lastTransitionTime":"2025-11-24T01:13:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.259672 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.259750 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.259760 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.259794 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.259805 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:57Z","lastTransitionTime":"2025-11-24T01:13:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.362868 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.362905 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.362918 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.362933 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.362945 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:57Z","lastTransitionTime":"2025-11-24T01:13:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.465595 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.465672 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.465684 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.465704 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.465717 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:57Z","lastTransitionTime":"2025-11-24T01:13:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.568622 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.568674 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.568703 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.568720 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.568732 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:57Z","lastTransitionTime":"2025-11-24T01:13:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.670893 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.670941 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.670952 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.670968 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.670980 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:57Z","lastTransitionTime":"2025-11-24T01:13:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.773740 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.773818 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.773840 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.773870 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.773891 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:57Z","lastTransitionTime":"2025-11-24T01:13:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.884030 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.884064 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.884073 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.884087 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.884096 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:57Z","lastTransitionTime":"2025-11-24T01:13:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.986698 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.986737 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.986747 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.986762 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.986772 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:57Z","lastTransitionTime":"2025-11-24T01:13:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.996246 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:13:57 crc kubenswrapper[4755]: I1124 01:13:57.996297 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:13:57 crc kubenswrapper[4755]: E1124 01:13:57.996414 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:13:57 crc kubenswrapper[4755]: E1124 01:13:57.996568 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:13:58 crc kubenswrapper[4755]: I1124 01:13:58.089055 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:58 crc kubenswrapper[4755]: I1124 01:13:58.089087 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:58 crc kubenswrapper[4755]: I1124 01:13:58.089097 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:58 crc kubenswrapper[4755]: I1124 01:13:58.089112 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:58 crc kubenswrapper[4755]: I1124 01:13:58.089122 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:58Z","lastTransitionTime":"2025-11-24T01:13:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:58 crc kubenswrapper[4755]: I1124 01:13:58.191989 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:58 crc kubenswrapper[4755]: I1124 01:13:58.192043 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:58 crc kubenswrapper[4755]: I1124 01:13:58.192061 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:58 crc kubenswrapper[4755]: I1124 01:13:58.192084 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:58 crc kubenswrapper[4755]: I1124 01:13:58.192111 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:58Z","lastTransitionTime":"2025-11-24T01:13:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:58 crc kubenswrapper[4755]: I1124 01:13:58.295576 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:58 crc kubenswrapper[4755]: I1124 01:13:58.295687 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:58 crc kubenswrapper[4755]: I1124 01:13:58.295708 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:58 crc kubenswrapper[4755]: I1124 01:13:58.295734 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:58 crc kubenswrapper[4755]: I1124 01:13:58.295752 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:58Z","lastTransitionTime":"2025-11-24T01:13:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:58 crc kubenswrapper[4755]: I1124 01:13:58.399059 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:58 crc kubenswrapper[4755]: I1124 01:13:58.399134 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:58 crc kubenswrapper[4755]: I1124 01:13:58.399157 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:58 crc kubenswrapper[4755]: I1124 01:13:58.399185 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:58 crc kubenswrapper[4755]: I1124 01:13:58.399206 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:58Z","lastTransitionTime":"2025-11-24T01:13:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:58 crc kubenswrapper[4755]: I1124 01:13:58.502317 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:58 crc kubenswrapper[4755]: I1124 01:13:58.502398 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:58 crc kubenswrapper[4755]: I1124 01:13:58.502426 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:58 crc kubenswrapper[4755]: I1124 01:13:58.502461 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:58 crc kubenswrapper[4755]: I1124 01:13:58.502482 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:58Z","lastTransitionTime":"2025-11-24T01:13:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:58 crc kubenswrapper[4755]: I1124 01:13:58.604887 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:58 crc kubenswrapper[4755]: I1124 01:13:58.604944 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:58 crc kubenswrapper[4755]: I1124 01:13:58.605003 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:58 crc kubenswrapper[4755]: I1124 01:13:58.605029 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:58 crc kubenswrapper[4755]: I1124 01:13:58.605047 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:58Z","lastTransitionTime":"2025-11-24T01:13:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:58 crc kubenswrapper[4755]: I1124 01:13:58.707460 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:58 crc kubenswrapper[4755]: I1124 01:13:58.707511 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:58 crc kubenswrapper[4755]: I1124 01:13:58.707522 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:58 crc kubenswrapper[4755]: I1124 01:13:58.707541 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:58 crc kubenswrapper[4755]: I1124 01:13:58.707553 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:58Z","lastTransitionTime":"2025-11-24T01:13:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:58 crc kubenswrapper[4755]: I1124 01:13:58.810133 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:58 crc kubenswrapper[4755]: I1124 01:13:58.810205 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:58 crc kubenswrapper[4755]: I1124 01:13:58.810229 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:58 crc kubenswrapper[4755]: I1124 01:13:58.810263 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:58 crc kubenswrapper[4755]: I1124 01:13:58.810287 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:58Z","lastTransitionTime":"2025-11-24T01:13:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:58 crc kubenswrapper[4755]: I1124 01:13:58.913654 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:58 crc kubenswrapper[4755]: I1124 01:13:58.913733 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:58 crc kubenswrapper[4755]: I1124 01:13:58.913749 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:58 crc kubenswrapper[4755]: I1124 01:13:58.913772 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:58 crc kubenswrapper[4755]: I1124 01:13:58.913788 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:58Z","lastTransitionTime":"2025-11-24T01:13:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:58 crc kubenswrapper[4755]: I1124 01:13:58.996199 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:13:58 crc kubenswrapper[4755]: I1124 01:13:58.996199 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:13:58 crc kubenswrapper[4755]: E1124 01:13:58.996459 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:13:58 crc kubenswrapper[4755]: E1124 01:13:58.996538 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.016046 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.016102 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.016119 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.016140 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.016152 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:59Z","lastTransitionTime":"2025-11-24T01:13:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.119012 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.119088 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.119110 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.119138 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.119159 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:59Z","lastTransitionTime":"2025-11-24T01:13:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.221307 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.221346 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.221358 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.221373 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.221384 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:59Z","lastTransitionTime":"2025-11-24T01:13:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.324833 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.324908 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.324940 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.324981 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.325009 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:59Z","lastTransitionTime":"2025-11-24T01:13:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.426999 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.427046 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.427061 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.427078 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.427089 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:59Z","lastTransitionTime":"2025-11-24T01:13:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.529648 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.529682 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.529692 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.529707 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.529718 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:59Z","lastTransitionTime":"2025-11-24T01:13:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.633484 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.633560 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.633584 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.633650 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.633675 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:59Z","lastTransitionTime":"2025-11-24T01:13:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.736296 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.736365 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.736387 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.736415 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.736439 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:59Z","lastTransitionTime":"2025-11-24T01:13:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.839042 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.839086 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.839100 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.839119 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.839133 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:59Z","lastTransitionTime":"2025-11-24T01:13:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.941938 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.941987 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.941999 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.942020 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.942033 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:13:59Z","lastTransitionTime":"2025-11-24T01:13:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.995997 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:13:59 crc kubenswrapper[4755]: I1124 01:13:59.996098 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:13:59 crc kubenswrapper[4755]: E1124 01:13:59.996136 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:13:59 crc kubenswrapper[4755]: E1124 01:13:59.996390 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.044937 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.045082 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.045107 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.045138 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.045160 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:00Z","lastTransitionTime":"2025-11-24T01:14:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.148070 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.148129 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.148146 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.148168 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.148186 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:00Z","lastTransitionTime":"2025-11-24T01:14:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.251539 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.251628 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.251640 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.251658 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.251670 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:00Z","lastTransitionTime":"2025-11-24T01:14:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.354312 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.354367 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.354385 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.354411 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.354428 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:00Z","lastTransitionTime":"2025-11-24T01:14:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.458443 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.458497 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.458508 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.458525 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.458536 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:00Z","lastTransitionTime":"2025-11-24T01:14:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.561687 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.561723 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.561735 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.561753 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.561765 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:00Z","lastTransitionTime":"2025-11-24T01:14:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.664986 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.665050 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.665090 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.665110 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.665123 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:00Z","lastTransitionTime":"2025-11-24T01:14:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.767597 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.767661 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.767674 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.767693 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.767706 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:00Z","lastTransitionTime":"2025-11-24T01:14:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.870433 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.870461 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.870471 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.870485 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.870496 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:00Z","lastTransitionTime":"2025-11-24T01:14:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.972661 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.972726 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.972740 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.972758 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.972770 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:00Z","lastTransitionTime":"2025-11-24T01:14:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.996469 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:14:00 crc kubenswrapper[4755]: E1124 01:14:00.996625 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:14:00 crc kubenswrapper[4755]: I1124 01:14:00.996833 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:14:00 crc kubenswrapper[4755]: E1124 01:14:00.996911 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.075112 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.075175 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.075193 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.075220 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.075238 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:01Z","lastTransitionTime":"2025-11-24T01:14:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.178833 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.178918 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.178928 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.178942 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.178951 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:01Z","lastTransitionTime":"2025-11-24T01:14:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.281121 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.281166 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.281177 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.281198 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.281212 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:01Z","lastTransitionTime":"2025-11-24T01:14:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.384518 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.384577 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.384597 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.384658 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.384682 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:01Z","lastTransitionTime":"2025-11-24T01:14:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.487390 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.487445 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.487457 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.487474 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.487485 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:01Z","lastTransitionTime":"2025-11-24T01:14:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.589541 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.589591 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.589619 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.589636 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.589644 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:01Z","lastTransitionTime":"2025-11-24T01:14:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.596130 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ccb86693-0b66-43ca-a2d1-e9594521d30f-metrics-certs\") pod \"network-metrics-daemon-9cl8m\" (UID: \"ccb86693-0b66-43ca-a2d1-e9594521d30f\") " pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:14:01 crc kubenswrapper[4755]: E1124 01:14:01.596241 4755 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 24 01:14:01 crc kubenswrapper[4755]: E1124 01:14:01.596303 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ccb86693-0b66-43ca-a2d1-e9594521d30f-metrics-certs podName:ccb86693-0b66-43ca-a2d1-e9594521d30f nodeName:}" failed. No retries permitted until 2025-11-24 01:14:33.596285886 +0000 UTC m=+98.282351387 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ccb86693-0b66-43ca-a2d1-e9594521d30f-metrics-certs") pod "network-metrics-daemon-9cl8m" (UID: "ccb86693-0b66-43ca-a2d1-e9594521d30f") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.692838 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.692903 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.692917 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.692932 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.692967 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:01Z","lastTransitionTime":"2025-11-24T01:14:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.795099 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.795154 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.795176 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.795204 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.795225 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:01Z","lastTransitionTime":"2025-11-24T01:14:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.897834 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.897903 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.897920 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.897943 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.897963 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:01Z","lastTransitionTime":"2025-11-24T01:14:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.995975 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.996010 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:14:01 crc kubenswrapper[4755]: E1124 01:14:01.996332 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:14:01 crc kubenswrapper[4755]: E1124 01:14:01.996826 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:14:01 crc kubenswrapper[4755]: I1124 01:14:01.997220 4755 scope.go:117] "RemoveContainer" containerID="154995ba8ce124a48fc983de87e0fcbccc684c8bf36edf3b12565a113f82c8b5" Nov 24 01:14:01 crc kubenswrapper[4755]: E1124 01:14:01.997454 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-4ngwk_openshift-ovn-kubernetes(b3b1d3cb-ffbd-4034-832d-6577ccf2f780)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:01.999993 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.000022 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.000033 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.000047 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.000058 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:02Z","lastTransitionTime":"2025-11-24T01:14:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.101989 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.102040 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.102066 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.102087 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.102102 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:02Z","lastTransitionTime":"2025-11-24T01:14:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.203989 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.204025 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.204064 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.204081 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.204091 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:02Z","lastTransitionTime":"2025-11-24T01:14:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.306029 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.306073 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.306106 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.306126 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.306138 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:02Z","lastTransitionTime":"2025-11-24T01:14:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.408352 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.408398 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.408412 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.408449 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.408461 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:02Z","lastTransitionTime":"2025-11-24T01:14:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.511250 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.511286 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.511297 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.511313 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.511324 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:02Z","lastTransitionTime":"2025-11-24T01:14:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.614060 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.614115 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.614128 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.614143 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.614152 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:02Z","lastTransitionTime":"2025-11-24T01:14:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.716546 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.716585 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.716595 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.716645 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.716656 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:02Z","lastTransitionTime":"2025-11-24T01:14:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.819150 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.819239 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.819275 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.819310 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.819338 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:02Z","lastTransitionTime":"2025-11-24T01:14:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.921866 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.921904 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.921912 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.921926 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.921936 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:02Z","lastTransitionTime":"2025-11-24T01:14:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.996456 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:14:02 crc kubenswrapper[4755]: I1124 01:14:02.996456 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:14:02 crc kubenswrapper[4755]: E1124 01:14:02.996661 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:14:02 crc kubenswrapper[4755]: E1124 01:14:02.996741 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.023452 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.023526 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.023542 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.023559 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.023570 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:03Z","lastTransitionTime":"2025-11-24T01:14:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.127077 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.127180 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.127202 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.127230 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.127245 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:03Z","lastTransitionTime":"2025-11-24T01:14:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.230067 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.230107 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.230119 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.230138 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.230169 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:03Z","lastTransitionTime":"2025-11-24T01:14:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.333394 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.333853 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.333917 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.333943 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.334269 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:03Z","lastTransitionTime":"2025-11-24T01:14:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.437323 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.437389 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.437413 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.437442 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.437466 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:03Z","lastTransitionTime":"2025-11-24T01:14:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.539984 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.540085 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.540104 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.540129 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.540147 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:03Z","lastTransitionTime":"2025-11-24T01:14:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.642863 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.642923 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.642938 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.642965 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.642985 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:03Z","lastTransitionTime":"2025-11-24T01:14:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.744596 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.744699 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.744717 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.744743 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.744760 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:03Z","lastTransitionTime":"2025-11-24T01:14:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.846896 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.846930 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.846941 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.846955 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.846964 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:03Z","lastTransitionTime":"2025-11-24T01:14:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.949052 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.949127 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.949151 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.949180 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.949201 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:03Z","lastTransitionTime":"2025-11-24T01:14:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.996091 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:14:03 crc kubenswrapper[4755]: E1124 01:14:03.996225 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:14:03 crc kubenswrapper[4755]: I1124 01:14:03.996095 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:14:03 crc kubenswrapper[4755]: E1124 01:14:03.996469 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.052209 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.052261 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.052276 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.052299 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.052316 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:04Z","lastTransitionTime":"2025-11-24T01:14:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.154846 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.154911 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.154923 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.154939 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.154952 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:04Z","lastTransitionTime":"2025-11-24T01:14:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.257362 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.257432 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.257448 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.257471 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.257487 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:04Z","lastTransitionTime":"2025-11-24T01:14:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.362077 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.362138 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.362155 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.362182 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.362199 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:04Z","lastTransitionTime":"2025-11-24T01:14:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.381971 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8pm69_19dbf7ff-f684-4c57-803a-83b39e0705a4/kube-multus/0.log" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.382065 4755 generic.go:334] "Generic (PLEG): container finished" podID="19dbf7ff-f684-4c57-803a-83b39e0705a4" containerID="090d3d275453f489254405c1197750888cc4d15d4251c6ce9b8a1a873319d5a4" exitCode=1 Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.382176 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-8pm69" event={"ID":"19dbf7ff-f684-4c57-803a-83b39e0705a4","Type":"ContainerDied","Data":"090d3d275453f489254405c1197750888cc4d15d4251c6ce9b8a1a873319d5a4"} Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.382747 4755 scope.go:117] "RemoveContainer" containerID="090d3d275453f489254405c1197750888cc4d15d4251c6ce9b8a1a873319d5a4" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.405415 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:04Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.420519 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:04Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.438002 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4407e6a61e6a16826e5087eda8f3f2904933c9fb540f0a3c03e5414188248657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bcaff046f33f0df74919d9673b7e12e0609ea03f7686c1806ffb3c351965195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:04Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.451452 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://519ff5cd20c605ee21f4c1651c932618d593cb5ecf336f3504ebe1bd0f35adbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:04Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.469007 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1962128-02a0-46c3-82c2-5055c2aed0b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a249831bc7bdc4396bd5498c176a52332946f791c39b1f90eea4c157e61fc60c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16a2280b76aa63e33f859193da8353df1f8a4014ef51c0ef6350beb5fb217245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h8xzm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:04Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.471476 4755 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.471506 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.471518 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.471534 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.471545 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:04Z","lastTransitionTime":"2025-11-24T01:14:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.483663 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8pm69" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19dbf7ff-f684-4c57-803a-83b39e0705a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://090d3d275453f489254405c1197750888cc4d15d4251c6ce9b8a1a873319d5a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://090d3d275453f489254405c1197750888cc4d15d4251c6ce9b8a1a873319d5a4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T01:14:03Z\\\",\\\"message\\\":\\\"2025-11-24T01:13:17+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_29c906e4-4a05-4506-9910-8ca86f43fdc8\\\\n2025-11-24T01:13:17+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_29c906e4-4a05-4506-9910-8ca86f43fdc8 to /host/opt/cni/bin/\\\\n2025-11-24T01:13:18Z [verbose] 
multus-daemon started\\\\n2025-11-24T01:13:18Z [verbose] Readiness Indicator file check\\\\n2025-11-24T01:14:03Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6k7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8pm69\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:04Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.495058 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vzkz4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a85ee7-3417-491b-a375-99f140cfb5de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67684d7d9617d917019f391c4031994da8f4ff110a4fe3d77dfa1bf76378dec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tts5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vzkz4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:04Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.504684 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7wvfc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a32df1d1-89b4-4a22-a07f-2d7ecd2e265b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f7432e293e8131ca1bbbc7281982a639b41e995fc2210e6351a0e87cf719700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2pjf7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff945c5b4cb1534cb8e31d7e471ebe2f47fe04ea46f09fc53223a72f4c48de5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2pjf7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:27Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7wvfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:04Z is after 2025-08-24T17:21:41Z" Nov 24 
01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.517255 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"335c2304-248f-4c4e-8e4c-58ec5e76af4c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://288a11a39d941a7886acc9d7ee23935ca723a109dcb2b8f0743bb56e7d54a7e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://277775a9aa022595c0bd26119595cdf69b370e4851a05cab33b8ef4779923a78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beedc1a75d10e8bbe8f51adfca845b931ae288d59d89220ecc08bb65c425dcd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.
126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfc5ee83a76ce24ca8eab469ec28b319d9e69d9c7d8618f97a2b2a813022a97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfc5ee83a76ce24ca8eab469ec28b319d9e69d9c7d8618f97a2b2a813022a97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:04Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.533988 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27983572-2d9c-43d6-a7f0-445a0aec0531\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://612fe8e80230000f941c6c568234f32233a32e831a6d75556d9af68a1b790a7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerSta
tuses\\\":[{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:
13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-c
ni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zb6qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:04Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.549630 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9cl8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ccb86693-0b66-43ca-a2d1-e9594521d30f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl68c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl68c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:29Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9cl8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:04Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.571698 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b438206-d27d-46e5-a2b5-91397c0d8856\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7575abb359776b9b306d821357a1147cadfd41c99d9528e644a3c1fa046d1df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11946902b549b820749fb6127935c82cc1e840cd84834d1803ffc03003c967d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34675c12ebbb549e0db1190d1d99a23ccef6e4a97b884bcdafdc629204238c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25ec710b1a2a3c639ced06a73586f11fbc05067
1bc059fdcaf49714f153445d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64cdda2f5b59ddeadc36986c875cf28e18a7ed1ce1822dff6002724f7e558215\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:04Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.573798 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.573845 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.573857 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.573874 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.573887 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:04Z","lastTransitionTime":"2025-11-24T01:14:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.586733 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69352793-3db7-450c-98c2-5fd2040bb5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c1c133fe773aac40181aea15bd49ad2aee2f08caed8f7436c81ae3fc310300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b22922ca0ffb3c9c410a6ed82822ce5b3436d0e44993abe1fcb678959f243808\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5845caccbc89d64a7aaabedeec11a5d75fb7f25eb806cf8dceaf15263f30b4f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a9cb7e9aa093857c03bb4267ff3386e04e4de6e4025fdbfcb628db800d62745\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:04Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.601910 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:04Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.612307 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dt8lz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58ea3cf6-1f18-428e-9b3f-6064671faf72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63e4ef9d52542d28688b962a86087f6b95a40625eefb13ad0460e2e728f5048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fc2wq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dt8lz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:04Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.635719 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://154995ba8ce124a48fc983de87e0fcbccc684c8bf36edf3b12565a113f82c8b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://154995ba8ce124a48fc983de87e0fcbccc684c8bf36edf3b12565a113f82c8b5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T01:13:44Z\\\",\\\"message\\\":\\\"ices.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1124 01:13:44.947473 6420 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 01:13:44.947487 6420 services_controller.go:452] Built service openshift-kube-apiserver/apiserver per-node LB for network=default: []services.LB{}\\\\nI1124 01:13:44.947496 6420 services_controller.go:453] Built service openshift-kube-apiserver/apiserver template LB for network=default: []services.LB{}\\\\nI1124 01:13:44.947495 6420 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:73135118-cf1b-4568-bd31-2f50308bf69d}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1124 01:13:44.946799 6420 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-4ngwk_openshift-ovn-kubernetes(b3b1d3cb-ffbd-4034-832d-6577ccf2f780)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4ngwk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:04Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.651078 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"103d5a7c-0ec9-4c03-9f86-03dc3e01665c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40df6848b15d905bccead2547cc1dec836a69e38e80cb65de5dbee812f0081cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89d05f3773b4861d3bb33d7e6a6f09baaedb7510ed0b15703495110923a2f790\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78a49336f91d7c8aeb0d2861d1228d7b8eb478290bdef3b0662e26f9e1cddd57\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cbe55a8cc684f932438f20f513d01400266c1c6ee3ded9c9ae1ea0b92aec7af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T01:13:12Z\\\",\\\"message\\\":\\\"GCM_SHA384' detected.\\\\nW1124 01:13:12.041119 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 01:13:12.041123 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 01:13:12.046149 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046144 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046262 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046271 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1124 01:13:12.046381 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1124 01:13:12.046396 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1124 01:13:12.046536 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\"\\\\nI1124 01:13:12.046556 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1763946791\\\\\\\\\\\\\\\" (2025-11-24 01:13:10 +0000 UTC to 2025-12-24 01:13:11 +0000 UTC (now=2025-11-24 01:13:12.046510889 +0000 UTC))\\\\\\\"\\\\nF1124 01:13:12.046649 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcfcdeaa4c2ae4a6f4bf2d79a64607361b9798549e8910a7374e4e3d17a16ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:04Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.666743 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://311d898b91dec73c482063aca5a79fefbc55b705d41d4da5fbbc21d8972deac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:04Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.676842 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.676905 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.676922 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.676945 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.676960 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:04Z","lastTransitionTime":"2025-11-24T01:14:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.779379 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.779411 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.779422 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.779437 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.779448 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:04Z","lastTransitionTime":"2025-11-24T01:14:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.882865 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.882916 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.882932 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.882954 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:04 crc kubenswrapper[4755]: I1124 01:14:04.882970 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:04Z","lastTransitionTime":"2025-11-24T01:14:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.015943 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:14:05 crc kubenswrapper[4755]: E1124 01:14:05.016126 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.016214 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:14:05 crc kubenswrapper[4755]: E1124 01:14:05.016483 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.018187 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.018312 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.018335 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.018360 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.018377 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:05Z","lastTransitionTime":"2025-11-24T01:14:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.121441 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.121493 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.121511 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.121533 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.121549 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:05Z","lastTransitionTime":"2025-11-24T01:14:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.224687 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.224750 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.224766 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.224791 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.224808 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:05Z","lastTransitionTime":"2025-11-24T01:14:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.327583 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.327698 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.327720 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.327747 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.327766 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:05Z","lastTransitionTime":"2025-11-24T01:14:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.387335 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8pm69_19dbf7ff-f684-4c57-803a-83b39e0705a4/kube-multus/0.log" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.387451 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-8pm69" event={"ID":"19dbf7ff-f684-4c57-803a-83b39e0705a4","Type":"ContainerStarted","Data":"edbc9d9876663d11869ff6269682427495c7d205d739e946c354595876274685"} Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.406655 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:05Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.419687 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dt8lz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58ea3cf6-1f18-428e-9b3f-6064671faf72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63e4ef9d52542d28688b962a86087f6b95a40625eefb13ad0460e2e728f5048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fc2wq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dt8lz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:05Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.430859 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.430918 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.430935 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.430959 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.430976 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:05Z","lastTransitionTime":"2025-11-24T01:14:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.439600 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://154995ba8ce124a48fc983de87e0fcbccc684c8b
f36edf3b12565a113f82c8b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://154995ba8ce124a48fc983de87e0fcbccc684c8bf36edf3b12565a113f82c8b5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T01:13:44Z\\\",\\\"message\\\":\\\"ices.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1124 01:13:44.947473 6420 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 01:13:44.947487 6420 services_controller.go:452] Built service openshift-kube-apiserver/apiserver per-node LB for network=default: []services.LB{}\\\\nI1124 01:13:44.947496 6420 services_controller.go:453] Built service openshift-kube-apiserver/apiserver template LB for network=default: []services.LB{}\\\\nI1124 01:13:44.947495 6420 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:73135118-cf1b-4568-bd31-2f50308bf69d}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1124 01:13:44.946799 6420 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-4ngwk_openshift-ovn-kubernetes(b3b1d3cb-ffbd-4034-832d-6577ccf2f780)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4ngwk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:05Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.459263 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"103d5a7c-0ec9-4c03-9f86-03dc3e01665c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40df6848b15d905bccead2547cc1dec836a69e38e80cb65de5dbee812f0081cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89d05f3773b4861d3bb33d7e6a6f09baaedb7510ed0b15703495110923a2f790\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78a49336f91d7c8aeb0d2861d1228d7b8eb478290bdef3b0662e26f9e1cddd57\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cbe55a8cc684f932438f20f513d01400266c1c6ee3ded9c9ae1ea0b92aec7af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T01:13:12Z\\\",\\\"message\\\":\\\"GCM_SHA384' detected.\\\\nW1124 01:13:12.041119 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 01:13:12.041123 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 01:13:12.046149 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046144 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046262 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046271 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1124 01:13:12.046381 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1124 01:13:12.046396 1 envvar.go:172] \\\\\\\"Feature gate 
default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1124 01:13:12.046536 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\"\\\\nI1124 01:13:12.046556 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1763946791\\\\\\\\\\\\\\\" (2025-11-24 01:13:10 +0000 UTC to 2025-12-24 01:13:11 +0000 UTC (now=2025-11-24 01:13:12.046510889 +0000 UTC))\\\\\\\"\\\\nF1124 01:13:12.046649 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcfcdeaa4c2ae4a6f4bf2d79a64607361b9798549e8910a7374e4e3d17a16ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:05Z is after 
2025-08-24T17:21:41Z" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.478077 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://311d898b91dec73c482063aca5a79fefbc55b705d41d4da5fbbc21d8972deac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:05Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.495815 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:05Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.512176 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:05Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.527687 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4407e6a61e6a16826e5087eda8f3f2904933c9fb540f0a3c03e5414188248657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bcaff046f33f0df74919d9673b7e12e0609ea03f7686c1806ffb3c351965195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:05Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.532899 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.532952 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.532963 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.532978 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.532989 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:05Z","lastTransitionTime":"2025-11-24T01:14:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.539336 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://519ff5cd20c605ee21f4c1651c932618d593cb5ecf336f3504ebe1bd0f35adbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:05Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.552328 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1962128-02a0-46c3-82c2-5055c2aed0b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a249831bc7bdc4396bd5498c176a52332946f791c39b1f90eea4c157e61fc60c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16a2280b76aa63e33f859193da8353df1f8a4014ef51c0ef6350beb5fb217245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h8xzm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:05Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.567626 4755 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-8pm69" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19dbf7ff-f684-4c57-803a-83b39e0705a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://edbc9d9876663d11869ff6269682427495c7d205d739e946c354595876274685\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://090d3d275453f489254405c1197750888cc4d15d4251c6ce9b8a1a873319d5a4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T01:14:03Z\\\",\\\"message\\\":\\\"2025-11-24T01:13:17+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_29c906e4-4a05-4506-9910-8ca86f43fdc8\\\\n2025-11-24T01:13:17+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_29c906e4-4a05-4506-9910-8ca86f43fdc8 to /host/opt/cni/bin/\\\\n2025-11-24T01:13:18Z [verbose] multus-daemon started\\\\n2025-11-24T01:13:18Z [verbose] Readiness Indicator file check\\\\n2025-11-24T01:14:03Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6k7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8pm69\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:05Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.578563 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vzkz4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a85ee7-3417-491b-a375-99f140cfb5de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67684d7d9617d917019f391c4031994da8f4ff110a4fe3d77dfa1bf76378dec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tts5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vzkz4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:05Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.590120 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7wvfc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a32df1d1-89b4-4a22-a07f-2d7ecd2e265b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f7432e293e8131ca1bbbc7281982a639b41e995fc2210e6351a0e87cf719700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2pjf7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff945c5b4cb1534cb8e31d7e471ebe2f47fe04ea46f09fc53223a72f4c48de5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2pjf7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:27Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7wvfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:05Z is after 2025-08-24T17:21:41Z" Nov 24 
01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.600379 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"335c2304-248f-4c4e-8e4c-58ec5e76af4c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://288a11a39d941a7886acc9d7ee23935ca723a109dcb2b8f0743bb56e7d54a7e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://277775a9aa022595c0bd26119595cdf69b370e4851a05cab33b8ef4779923a78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beedc1a75d10e8bbe8f51adfca845b931ae288d59d89220ecc08bb65c425dcd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.
126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfc5ee83a76ce24ca8eab469ec28b319d9e69d9c7d8618f97a2b2a813022a97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfc5ee83a76ce24ca8eab469ec28b319d9e69d9c7d8618f97a2b2a813022a97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:05Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.616888 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27983572-2d9c-43d6-a7f0-445a0aec0531\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://612fe8e80230000f941c6c568234f32233a32e831a6d75556d9af68a1b790a7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerSta
tuses\\\":[{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:
13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-c
ni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zb6qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:05Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.629937 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9cl8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ccb86693-0b66-43ca-a2d1-e9594521d30f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl68c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl68c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:29Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9cl8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:05Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.634956 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.634990 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.635004 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.635021 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.635035 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:05Z","lastTransitionTime":"2025-11-24T01:14:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.650817 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b438206-d27d-46e5-a2b5-91397c0d8856\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7575abb359776b9b306d821357a1147cadfd41c99d9528e644a3c1fa046d1df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11946902b549b820749fb6127935c82cc1e840cd84834d1803ffc03003c967d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34675c12ebbb549e0db1190d1d99a23ccef6e4a97b884bcdafdc629204238c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25ec710b1a2a3c639ced06a73586f11fbc050671bc059fdcaf49714f153445d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64cdda2f5b59ddeadc36986c875cf28e18a7ed1ce1822dff6002724f7e558215\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:05Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.662799 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"69352793-3db7-450c-98c2-5fd2040bb5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c1c133fe773aac40181aea15bd49ad2aee2f08caed8f7436c81ae3fc310300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b22922ca0ffb3c9c410a6ed82822ce5b3436d0e44993abe1fcb678959f243808\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5845caccbc89d64a7aaabedeec11a5d75fb7f25eb806cf8dceaf15263f30b4f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a9cb7e9aa093857c03bb4267ff3386e04e4de6e4025fdbfcb628db800d62745\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:05Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.738337 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.738406 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.738431 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.738461 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.738484 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:05Z","lastTransitionTime":"2025-11-24T01:14:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.841590 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.841645 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.841656 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.841674 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.841686 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:05Z","lastTransitionTime":"2025-11-24T01:14:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.944708 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.944764 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.944786 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.944815 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.944837 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:05Z","lastTransitionTime":"2025-11-24T01:14:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.996520 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:14:05 crc kubenswrapper[4755]: I1124 01:14:05.996691 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:14:05 crc kubenswrapper[4755]: E1124 01:14:05.996837 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:14:05 crc kubenswrapper[4755]: E1124 01:14:05.996941 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.009400 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.016788 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:06Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.036578 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:06Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.046556 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.046623 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.046636 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.046654 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.046667 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:06Z","lastTransitionTime":"2025-11-24T01:14:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.056512 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"335c2304-248f-4c4e-8e4c-58ec5e76af4c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://288a11a39d941a7886acc9d7ee23935ca723a109dcb2b8f0743bb56e7d54a7e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://277775a9aa022595c0bd26119595cdf69b370e4851a05cab33b8ef4779923a78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beedc1a75d10e8bbe8f51adfca845b931ae288d59d89220ecc08bb65c425dcd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfc5ee83a76ce24ca8eab469ec28b319d9e69d9c7d8618f97a2b2a813022a97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfc5ee83a76ce24ca8eab469ec28b319d9e69d9c7d8618f97a2b2a813022a97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:06Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.070581 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4407e6a61e6a16826e5087eda8f3f2904933c9fb540f0a3c03e5414188248657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bcaff046f33f0df74919d9673b7e12e0609ea03f7686c1806ffb3c351965195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919
d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:06Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.083541 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://519ff5cd20c605ee21f4c1651c932618d593cb5ecf336f3504ebe1bd0f35adbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:06Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:06 crc 
kubenswrapper[4755]: I1124 01:14:06.095800 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1962128-02a0-46c3-82c2-5055c2aed0b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a249831bc7bdc4396bd5498c176a52332946f791c39b1f90eea4c157e61fc60c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16a2280b76aa63e33f859193da8353df1f8a4014ef51c0ef6350beb5fb217245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h8xzm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-11-24T01:14:06Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.109475 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8pm69" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19dbf7ff-f684-4c57-803a-83b39e0705a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://edbc9d9876663d11869ff6269682427495c7d205d739e946c354595876274685\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://090d3d275453f489254405c1197750888cc4d15d4251c6ce9b8a1a873319d5a4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T01:14:03Z\\\",\\\"message\\\":\\\"2025-11-24T01:13:17+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_29c906e4-4a05-4506-9910-8ca86f43fdc8\\\\n2025-11-24T01:13:17+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_29c906e4-4a05-4506-9910-8ca86f43fdc8 to /host/opt/cni/bin/\\\\n2025-11-24T01:13:18Z [verbose] multus-daemon started\\\\n2025-11-24T01:13:18Z [verbose] Readiness Indicator file check\\\\n2025-11-24T01:14:03Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6k7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8pm69\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:06Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.123453 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vzkz4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a85ee7-3417-491b-a375-99f140cfb5de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67684d7d9617d917019f391c4031994da8f4ff110a4fe3d77dfa1bf76378dec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tts5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vzkz4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:06Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.155926 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7wvfc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a32df1d1-89b4-4a22-a07f-2d7ecd2e265b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f7432e293e8131ca1bbbc7281982a639b41e995fc2210e6351a0e87cf719700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2pjf7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff945c5b4cb1534cb8e31d7e471ebe2f47fe04ea46f09fc53223a72f4c48de5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2pjf7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:27Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7wvfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:06Z is after 2025-08-24T17:21:41Z" Nov 24 
01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.158023 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.158057 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.158068 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.158081 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.158090 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:06Z","lastTransitionTime":"2025-11-24T01:14:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.179096 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b438206-d27d-46e5-a2b5-91397c0d8856\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7575abb359776b9b306d821357a1147cadfd41c99d9528e644a3c1fa046d1df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11946902b549b820749fb6127935c82cc1e840cd84834d1803ffc03003c967d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b9009
2272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34675c12ebbb549e0db1190d1d99a23ccef6e4a97b884bcdafdc629204238c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25ec710b1a2a3c639ced06a73586f11fbc050671bc059fdcaf49714f153445d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64cdda2f5b59ddeadc36986c875cf28e18a7ed1ce1822dff6002724f7e558215\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\
":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:06Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.195370 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"69352793-3db7-450c-98c2-5fd2040bb5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c1c133fe773aac40181aea15bd49ad2aee2f08caed8f7436c81ae3fc310300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b22922ca0ffb3c9c410a6ed82822ce5b3436d0e44993abe1fcb678959f243808\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5845caccbc89d64a7aaabedeec11a5d75fb7f25eb806cf8dceaf15263f30b4f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a9cb7e9aa093857c03bb4267ff3386e04e4de6e4025fdbfcb628db800d62745\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:06Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.214683 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27983572-2d9c-43d6-a7f0-445a0aec0531\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://612fe8e80230000f941c6c568234f32233a32e831a6d75556d9af68a1b790a7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e878
2bb4061601c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-b
inary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termin
ated\\\":{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zb6qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:06Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.226260 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9cl8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ccb86693-0b66-43ca-a2d1-e9594521d30f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl68c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl68c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:29Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9cl8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:06Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.244209 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"103d5a7c-0ec9-4c03-9f86-03dc3e01665c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40df6848b15d905bccead2547cc1dec836a69e38e80cb65de5dbee812f0081cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89d05f3773b4861d3bb33d7e6a6f09baaedb7510ed0b15703495110923a2f790\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78a49336f91d7c8aeb0d2861d1228d7b8eb478290bdef3b0662e26f9e1cddd57\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cbe55a8cc684f932438f20f513d01400266c1c6ee3ded9c9ae1ea0b92aec7af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T01:13:12Z\\\",\\\"message\\\":\\\"GCM_SHA384' detected.\\\\nW1124 01:13:12.041119 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 01:13:12.041123 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 01:13:12.046149 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046144 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046262 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046271 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1124 01:13:12.046381 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1124 01:13:12.046396 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1124 01:13:12.046536 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\"\\\\nI1124 01:13:12.046556 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1763946791\\\\\\\\\\\\\\\" (2025-11-24 01:13:10 +0000 UTC to 2025-12-24 01:13:11 +0000 UTC (now=2025-11-24 01:13:12.046510889 +0000 UTC))\\\\\\\"\\\\nF1124 01:13:12.046649 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcfcdeaa4c2ae4a6f4bf2d79a64607361b9798549e8910a7374e4e3d17a16ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:06Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.245046 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.245081 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.245091 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.245108 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.245119 4755 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:06Z","lastTransitionTime":"2025-11-24T01:14:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.259421 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://311d898b91dec73c482063aca5a79fefbc55b705d41d4da5fbbc21d8972deac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:06Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:06 crc kubenswrapper[4755]: E1124 01:14:06.260669 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:14:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:14:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:14:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:14:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"50bb41a3-bb20-461c-ba4c-72998ece87bc\\\",\\\"systemUUID\\\":\\\"cb6dbfa2-ee9a-4406-af65-1558b4c6cb25\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:06Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.263617 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.263650 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.263664 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.263681 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.263691 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:06Z","lastTransitionTime":"2025-11-24T01:14:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.272594 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:06Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:06 crc kubenswrapper[4755]: E1124 01:14:06.274840 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:14:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:14:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:14:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:14:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"50bb41a3-bb20-461c-ba4c-72998ece87bc\\\",\\\"systemUUID\\\":\\\"cb6dbfa2-ee9a-4406-af65-1558b4c6cb25\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:06Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.280499 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.280559 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.280573 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.280595 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.280640 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:06Z","lastTransitionTime":"2025-11-24T01:14:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.283886 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dt8lz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58ea3cf6-1f18-428e-9b3f-6064671faf72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63e4ef9d52542d28688b962a86087f6b95a40625eefb13ad0460e2e728f5048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fc2wq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dt8lz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:06Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:06 crc kubenswrapper[4755]: E1124 01:14:06.294346 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:14:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:14:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:14:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:14:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"50bb41a3-bb20-461c-ba4c-72998ece87bc\\\",\\\"systemUUID\\\":\\\"cb6dbfa2-ee9a-4406-af65-1558b4c6cb25\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:06Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.297738 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.297773 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.297788 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.297805 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.297815 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:06Z","lastTransitionTime":"2025-11-24T01:14:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.305679 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://154995ba8ce124a48fc983de87e0fcbccc684c8bf36edf3b12565a113f82c8b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://154995ba8ce124a48fc983de87e0fcbccc684c8bf36edf3b12565a113f82c8b5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T01:13:44Z\\\",\\\"message\\\":\\\"ices.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1124 01:13:44.947473 6420 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 01:13:44.947487 6420 services_controller.go:452] Built service openshift-kube-apiserver/apiserver per-node LB for network=default: []services.LB{}\\\\nI1124 01:13:44.947496 6420 services_controller.go:453] Built service openshift-kube-apiserver/apiserver template LB for network=default: []services.LB{}\\\\nI1124 01:13:44.947495 6420 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:73135118-cf1b-4568-bd31-2f50308bf69d}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1124 01:13:44.946799 6420 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-4ngwk_openshift-ovn-kubernetes(b3b1d3cb-ffbd-4034-832d-6577ccf2f780)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4ngwk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:06Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:06 crc kubenswrapper[4755]: E1124 01:14:06.308838 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:14:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:14:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:14:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:14:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeByt
es\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"50bb41a3-bb20-461c-ba4c-72998ece87bc\\\",\\\"systemUUID\\\":\\\"cb6dbfa2-ee9a-4406-af65-1558b4c6cb25\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-11-24T01:14:06Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.312006 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.312054 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.312064 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.312080 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.312090 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:06Z","lastTransitionTime":"2025-11-24T01:14:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:06 crc kubenswrapper[4755]: E1124 01:14:06.333320 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:14:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:14:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:14:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:14:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"50bb41a3-bb20-461c-ba4c-72998ece87bc\\\",\\\"systemUUID\\\":\\\"cb6dbfa2-ee9a-4406-af65-1558b4c6cb25\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:06Z is after 
2025-08-24T17:21:41Z" Nov 24 01:14:06 crc kubenswrapper[4755]: E1124 01:14:06.333463 4755 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.335539 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.335700 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.335816 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.335960 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.335993 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:06Z","lastTransitionTime":"2025-11-24T01:14:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.438162 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.438203 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.438215 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.438232 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.438244 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:06Z","lastTransitionTime":"2025-11-24T01:14:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.540550 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.540594 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.540626 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.540642 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.540652 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:06Z","lastTransitionTime":"2025-11-24T01:14:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.643595 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.643774 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.643794 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.643820 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.643878 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:06Z","lastTransitionTime":"2025-11-24T01:14:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.746557 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.746597 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.746624 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.746641 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.746651 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:06Z","lastTransitionTime":"2025-11-24T01:14:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.850147 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.850182 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.850190 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.850205 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.850215 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:06Z","lastTransitionTime":"2025-11-24T01:14:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.952694 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.952747 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.952758 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.952773 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.952784 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:06Z","lastTransitionTime":"2025-11-24T01:14:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.996410 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:14:06 crc kubenswrapper[4755]: E1124 01:14:06.996529 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:14:06 crc kubenswrapper[4755]: I1124 01:14:06.996814 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:14:06 crc kubenswrapper[4755]: E1124 01:14:06.997097 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.056060 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.056094 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.056103 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.056116 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.056125 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:07Z","lastTransitionTime":"2025-11-24T01:14:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.159268 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.159307 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.159317 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.159334 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.159346 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:07Z","lastTransitionTime":"2025-11-24T01:14:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.262694 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.262739 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.262761 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.262776 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.262786 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:07Z","lastTransitionTime":"2025-11-24T01:14:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.365271 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.365322 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.365336 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.365354 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.365367 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:07Z","lastTransitionTime":"2025-11-24T01:14:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.468422 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.468500 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.468512 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.468532 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.468544 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:07Z","lastTransitionTime":"2025-11-24T01:14:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.571687 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.571763 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.571784 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.571811 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.571827 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:07Z","lastTransitionTime":"2025-11-24T01:14:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.674650 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.674716 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.674738 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.674764 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.674783 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:07Z","lastTransitionTime":"2025-11-24T01:14:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.776893 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.776971 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.776995 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.777023 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.777039 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:07Z","lastTransitionTime":"2025-11-24T01:14:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.879776 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.879813 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.879823 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.879838 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.879847 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:07Z","lastTransitionTime":"2025-11-24T01:14:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.982575 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.982856 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.982963 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.983070 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.983158 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:07Z","lastTransitionTime":"2025-11-24T01:14:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.995868 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:14:07 crc kubenswrapper[4755]: I1124 01:14:07.995899 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:14:07 crc kubenswrapper[4755]: E1124 01:14:07.996004 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:14:07 crc kubenswrapper[4755]: E1124 01:14:07.996079 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:14:08 crc kubenswrapper[4755]: I1124 01:14:08.085461 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:08 crc kubenswrapper[4755]: I1124 01:14:08.085501 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:08 crc kubenswrapper[4755]: I1124 01:14:08.085511 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:08 crc kubenswrapper[4755]: I1124 01:14:08.085527 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:08 crc kubenswrapper[4755]: I1124 01:14:08.085536 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:08Z","lastTransitionTime":"2025-11-24T01:14:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:08 crc kubenswrapper[4755]: I1124 01:14:08.187870 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:08 crc kubenswrapper[4755]: I1124 01:14:08.187912 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:08 crc kubenswrapper[4755]: I1124 01:14:08.187926 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:08 crc kubenswrapper[4755]: I1124 01:14:08.187949 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:08 crc kubenswrapper[4755]: I1124 01:14:08.187964 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:08Z","lastTransitionTime":"2025-11-24T01:14:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:08 crc kubenswrapper[4755]: I1124 01:14:08.290195 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:08 crc kubenswrapper[4755]: I1124 01:14:08.290233 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:08 crc kubenswrapper[4755]: I1124 01:14:08.290241 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:08 crc kubenswrapper[4755]: I1124 01:14:08.290256 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:08 crc kubenswrapper[4755]: I1124 01:14:08.290265 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:08Z","lastTransitionTime":"2025-11-24T01:14:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:08 crc kubenswrapper[4755]: I1124 01:14:08.391838 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:08 crc kubenswrapper[4755]: I1124 01:14:08.391879 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:08 crc kubenswrapper[4755]: I1124 01:14:08.391891 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:08 crc kubenswrapper[4755]: I1124 01:14:08.391907 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:08 crc kubenswrapper[4755]: I1124 01:14:08.391916 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:08Z","lastTransitionTime":"2025-11-24T01:14:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:08 crc kubenswrapper[4755]: I1124 01:14:08.494672 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:08 crc kubenswrapper[4755]: I1124 01:14:08.494746 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:08 crc kubenswrapper[4755]: I1124 01:14:08.494780 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:08 crc kubenswrapper[4755]: I1124 01:14:08.494810 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:08 crc kubenswrapper[4755]: I1124 01:14:08.494829 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:08Z","lastTransitionTime":"2025-11-24T01:14:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:08 crc kubenswrapper[4755]: I1124 01:14:08.597384 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:08 crc kubenswrapper[4755]: I1124 01:14:08.597440 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:08 crc kubenswrapper[4755]: I1124 01:14:08.597457 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:08 crc kubenswrapper[4755]: I1124 01:14:08.597480 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:08 crc kubenswrapper[4755]: I1124 01:14:08.597497 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:08Z","lastTransitionTime":"2025-11-24T01:14:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:08 crc kubenswrapper[4755]: I1124 01:14:08.705015 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:08 crc kubenswrapper[4755]: I1124 01:14:08.705063 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:08 crc kubenswrapper[4755]: I1124 01:14:08.705077 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:08 crc kubenswrapper[4755]: I1124 01:14:08.705095 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:08 crc kubenswrapper[4755]: I1124 01:14:08.705112 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:08Z","lastTransitionTime":"2025-11-24T01:14:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:08 crc kubenswrapper[4755]: I1124 01:14:08.807929 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:08 crc kubenswrapper[4755]: I1124 01:14:08.807972 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:08 crc kubenswrapper[4755]: I1124 01:14:08.807988 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:08 crc kubenswrapper[4755]: I1124 01:14:08.808012 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:08 crc kubenswrapper[4755]: I1124 01:14:08.808029 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:08Z","lastTransitionTime":"2025-11-24T01:14:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:08 crc kubenswrapper[4755]: I1124 01:14:08.910339 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:08 crc kubenswrapper[4755]: I1124 01:14:08.910371 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:08 crc kubenswrapper[4755]: I1124 01:14:08.910385 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:08 crc kubenswrapper[4755]: I1124 01:14:08.910403 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:08 crc kubenswrapper[4755]: I1124 01:14:08.910414 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:08Z","lastTransitionTime":"2025-11-24T01:14:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:08 crc kubenswrapper[4755]: I1124 01:14:08.996415 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:14:08 crc kubenswrapper[4755]: E1124 01:14:08.996522 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:14:08 crc kubenswrapper[4755]: I1124 01:14:08.996426 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:14:08 crc kubenswrapper[4755]: E1124 01:14:08.996809 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.012109 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.012193 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.012202 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.012213 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.012437 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:09Z","lastTransitionTime":"2025-11-24T01:14:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.115045 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.115093 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.115109 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.115133 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.115149 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:09Z","lastTransitionTime":"2025-11-24T01:14:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.218622 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.218661 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.218699 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.218720 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.218732 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:09Z","lastTransitionTime":"2025-11-24T01:14:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.322291 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.322341 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.322355 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.322372 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.322383 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:09Z","lastTransitionTime":"2025-11-24T01:14:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.427320 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.427367 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.427376 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.427390 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.427404 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:09Z","lastTransitionTime":"2025-11-24T01:14:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.530374 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.530452 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.530480 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.530511 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.530536 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:09Z","lastTransitionTime":"2025-11-24T01:14:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.632912 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.632983 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.633042 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.633070 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.633093 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:09Z","lastTransitionTime":"2025-11-24T01:14:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.735941 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.735998 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.736015 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.736040 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.736058 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:09Z","lastTransitionTime":"2025-11-24T01:14:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.838141 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.838186 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.838198 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.838215 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.838229 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:09Z","lastTransitionTime":"2025-11-24T01:14:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.941164 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.941226 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.941245 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.941271 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.941287 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:09Z","lastTransitionTime":"2025-11-24T01:14:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.996493 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:14:09 crc kubenswrapper[4755]: E1124 01:14:09.996848 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:14:09 crc kubenswrapper[4755]: I1124 01:14:09.996921 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:14:09 crc kubenswrapper[4755]: E1124 01:14:09.997081 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.044918 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.044962 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.044975 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.044994 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.045006 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:10Z","lastTransitionTime":"2025-11-24T01:14:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.148414 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.148576 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.148671 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.148709 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.148787 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:10Z","lastTransitionTime":"2025-11-24T01:14:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.251653 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.251702 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.251720 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.251742 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.251758 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:10Z","lastTransitionTime":"2025-11-24T01:14:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.354058 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.354370 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.354396 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.354415 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.354433 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:10Z","lastTransitionTime":"2025-11-24T01:14:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.457837 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.457922 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.457948 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.457977 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.457997 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:10Z","lastTransitionTime":"2025-11-24T01:14:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.563036 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.563092 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.563115 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.563144 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.563167 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:10Z","lastTransitionTime":"2025-11-24T01:14:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.665687 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.665734 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.665747 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.665765 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.665779 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:10Z","lastTransitionTime":"2025-11-24T01:14:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.768949 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.769294 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.769480 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.769653 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.769808 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:10Z","lastTransitionTime":"2025-11-24T01:14:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.873405 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.873905 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.874129 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.874352 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.874564 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:10Z","lastTransitionTime":"2025-11-24T01:14:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.978484 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.978570 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.978590 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.978666 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.978692 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:10Z","lastTransitionTime":"2025-11-24T01:14:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.996453 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:14:10 crc kubenswrapper[4755]: I1124 01:14:10.996573 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:14:10 crc kubenswrapper[4755]: E1124 01:14:10.996832 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:14:10 crc kubenswrapper[4755]: E1124 01:14:10.996957 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:14:11 crc kubenswrapper[4755]: I1124 01:14:11.081515 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:11 crc kubenswrapper[4755]: I1124 01:14:11.081576 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:11 crc kubenswrapper[4755]: I1124 01:14:11.081595 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:11 crc kubenswrapper[4755]: I1124 01:14:11.081666 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:11 crc kubenswrapper[4755]: I1124 01:14:11.081705 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:11Z","lastTransitionTime":"2025-11-24T01:14:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:11 crc kubenswrapper[4755]: I1124 01:14:11.184589 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:11 crc kubenswrapper[4755]: I1124 01:14:11.184673 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:11 crc kubenswrapper[4755]: I1124 01:14:11.184688 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:11 crc kubenswrapper[4755]: I1124 01:14:11.184709 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:11 crc kubenswrapper[4755]: I1124 01:14:11.184724 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:11Z","lastTransitionTime":"2025-11-24T01:14:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:11 crc kubenswrapper[4755]: I1124 01:14:11.287950 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:11 crc kubenswrapper[4755]: I1124 01:14:11.288034 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:11 crc kubenswrapper[4755]: I1124 01:14:11.288061 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:11 crc kubenswrapper[4755]: I1124 01:14:11.288091 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:11 crc kubenswrapper[4755]: I1124 01:14:11.288115 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:11Z","lastTransitionTime":"2025-11-24T01:14:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:11 crc kubenswrapper[4755]: I1124 01:14:11.390834 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:11 crc kubenswrapper[4755]: I1124 01:14:11.390865 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:11 crc kubenswrapper[4755]: I1124 01:14:11.390875 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:11 crc kubenswrapper[4755]: I1124 01:14:11.390887 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:11 crc kubenswrapper[4755]: I1124 01:14:11.390895 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:11Z","lastTransitionTime":"2025-11-24T01:14:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:11 crc kubenswrapper[4755]: I1124 01:14:11.498095 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:11 crc kubenswrapper[4755]: I1124 01:14:11.498256 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:11 crc kubenswrapper[4755]: I1124 01:14:11.498826 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:11 crc kubenswrapper[4755]: I1124 01:14:11.498864 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:11 crc kubenswrapper[4755]: I1124 01:14:11.498882 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:11Z","lastTransitionTime":"2025-11-24T01:14:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:11 crc kubenswrapper[4755]: I1124 01:14:11.601750 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:11 crc kubenswrapper[4755]: I1124 01:14:11.601803 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:11 crc kubenswrapper[4755]: I1124 01:14:11.601818 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:11 crc kubenswrapper[4755]: I1124 01:14:11.601839 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:11 crc kubenswrapper[4755]: I1124 01:14:11.601855 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:11Z","lastTransitionTime":"2025-11-24T01:14:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:11 crc kubenswrapper[4755]: I1124 01:14:11.704154 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:11 crc kubenswrapper[4755]: I1124 01:14:11.704204 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:11 crc kubenswrapper[4755]: I1124 01:14:11.704215 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:11 crc kubenswrapper[4755]: I1124 01:14:11.704233 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:11 crc kubenswrapper[4755]: I1124 01:14:11.704244 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:11Z","lastTransitionTime":"2025-11-24T01:14:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:11 crc kubenswrapper[4755]: I1124 01:14:11.807048 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:11 crc kubenswrapper[4755]: I1124 01:14:11.807097 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:11 crc kubenswrapper[4755]: I1124 01:14:11.807109 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:11 crc kubenswrapper[4755]: I1124 01:14:11.807127 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:11 crc kubenswrapper[4755]: I1124 01:14:11.807138 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:11Z","lastTransitionTime":"2025-11-24T01:14:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:11 crc kubenswrapper[4755]: I1124 01:14:11.910008 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:11 crc kubenswrapper[4755]: I1124 01:14:11.910049 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:11 crc kubenswrapper[4755]: I1124 01:14:11.910067 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:11 crc kubenswrapper[4755]: I1124 01:14:11.910084 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:11 crc kubenswrapper[4755]: I1124 01:14:11.910096 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:11Z","lastTransitionTime":"2025-11-24T01:14:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:11 crc kubenswrapper[4755]: I1124 01:14:11.996664 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:14:11 crc kubenswrapper[4755]: I1124 01:14:11.996737 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:14:11 crc kubenswrapper[4755]: E1124 01:14:11.996874 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:14:11 crc kubenswrapper[4755]: E1124 01:14:11.997061 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.012252 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.012301 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.012312 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.012327 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.012339 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:12Z","lastTransitionTime":"2025-11-24T01:14:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.114111 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.114169 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.114204 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.114241 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.114263 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:12Z","lastTransitionTime":"2025-11-24T01:14:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.216281 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.216351 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.216368 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.216390 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.216408 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:12Z","lastTransitionTime":"2025-11-24T01:14:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.319146 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.319236 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.319309 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.319345 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.319368 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:12Z","lastTransitionTime":"2025-11-24T01:14:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.421766 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.421830 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.421848 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.421870 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.421887 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:12Z","lastTransitionTime":"2025-11-24T01:14:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.525031 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.525102 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.525130 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.525164 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.525188 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:12Z","lastTransitionTime":"2025-11-24T01:14:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.628575 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.628760 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.628797 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.628831 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.628857 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:12Z","lastTransitionTime":"2025-11-24T01:14:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.732469 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.732596 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.732658 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.732733 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.732760 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:12Z","lastTransitionTime":"2025-11-24T01:14:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.835685 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.835762 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.835792 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.835821 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.835841 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:12Z","lastTransitionTime":"2025-11-24T01:14:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.938275 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.938316 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.938324 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.938338 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.938347 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:12Z","lastTransitionTime":"2025-11-24T01:14:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.996392 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:14:12 crc kubenswrapper[4755]: I1124 01:14:12.996434 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:14:12 crc kubenswrapper[4755]: E1124 01:14:12.996699 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:14:12 crc kubenswrapper[4755]: E1124 01:14:12.996823 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.040719 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.040781 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.040801 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.040825 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.040844 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:13Z","lastTransitionTime":"2025-11-24T01:14:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.143442 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.143491 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.143505 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.143523 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.143535 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:13Z","lastTransitionTime":"2025-11-24T01:14:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.246097 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.246151 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.246164 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.246181 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.246193 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:13Z","lastTransitionTime":"2025-11-24T01:14:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.349752 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.349793 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.349804 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.349822 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.349834 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:13Z","lastTransitionTime":"2025-11-24T01:14:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.452559 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.452648 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.452672 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.452692 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.452713 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:13Z","lastTransitionTime":"2025-11-24T01:14:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.555956 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.556015 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.556032 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.556055 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.556073 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:13Z","lastTransitionTime":"2025-11-24T01:14:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.658702 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.658752 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.658764 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.658781 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.658795 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:13Z","lastTransitionTime":"2025-11-24T01:14:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.760914 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.761001 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.761025 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.761056 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.761080 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:13Z","lastTransitionTime":"2025-11-24T01:14:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.864665 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.864740 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.864761 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.864791 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.864812 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:13Z","lastTransitionTime":"2025-11-24T01:14:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.967569 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.967613 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.967623 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.967635 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.967644 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:13Z","lastTransitionTime":"2025-11-24T01:14:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.996329 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:14:13 crc kubenswrapper[4755]: E1124 01:14:13.996580 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.996684 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:14:13 crc kubenswrapper[4755]: E1124 01:14:13.997346 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:14:13 crc kubenswrapper[4755]: I1124 01:14:13.997774 4755 scope.go:117] "RemoveContainer" containerID="154995ba8ce124a48fc983de87e0fcbccc684c8bf36edf3b12565a113f82c8b5" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.070349 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.070424 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.070450 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.070502 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.070526 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:14Z","lastTransitionTime":"2025-11-24T01:14:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.175092 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.175145 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.175166 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.175194 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.175214 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:14Z","lastTransitionTime":"2025-11-24T01:14:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.277656 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.277693 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.277701 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.277716 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.277725 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:14Z","lastTransitionTime":"2025-11-24T01:14:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.380971 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.380996 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.381004 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.381016 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.381024 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:14Z","lastTransitionTime":"2025-11-24T01:14:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.416332 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4ngwk_b3b1d3cb-ffbd-4034-832d-6577ccf2f780/ovnkube-controller/2.log" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.418805 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" event={"ID":"b3b1d3cb-ffbd-4034-832d-6577ccf2f780","Type":"ContainerStarted","Data":"722b96ad79d500044a30b065a6a5fa100a2388f7c296ae8858a3a83b881a1e0d"} Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.420225 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.449674 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"103d5a7c-0ec9-4c03-9f86-03dc3e01665c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40df6848b15d905bccead2547cc1dec836a69e38e80cb65de5dbee812f0081cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89d05f3773b4861d3bb33d7e6a6f09baaedb7510ed0b15703495110923a2f790\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78a49336f
91d7c8aeb0d2861d1228d7b8eb478290bdef3b0662e26f9e1cddd57\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cbe55a8cc684f932438f20f513d01400266c1c6ee3ded9c9ae1ea0b92aec7af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T01:13:12Z\\\",\\\"message\\\":\\\"GCM_SHA384' detected.\\\\nW1124 01:13:12.041119 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 01:13:12.041123 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 01:13:12.046149 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046144 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046262 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046271 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1124 01:13:12.046381 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1124 01:13:12.046396 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1124 01:13:12.046536 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\"\\\\nI1124 01:13:12.046556 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1763946791\\\\\\\\\\\\\\\" (2025-11-24 01:13:10 +0000 UTC to 2025-12-24 
01:13:11 +0000 UTC (now=2025-11-24 01:13:12.046510889 +0000 UTC))\\\\\\\"\\\\nF1124 01:13:12.046649 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcfcdeaa4c2ae4a6f4bf2d79a64607361b9798549e8910a7374e4e3d17a16ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:14Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.467458 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://311d898b91dec73c482063aca5a79fefbc55b705d41d4da5fbbc21d8972deac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:14Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.479470 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:14Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.482895 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.482919 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.482929 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.482943 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.482953 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:14Z","lastTransitionTime":"2025-11-24T01:14:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.488229 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dt8lz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58ea3cf6-1f18-428e-9b3f-6064671faf72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63e4ef9d52542d28688b962a86087f6b95a40625eefb13ad0460e2e728f5048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fc2wq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dt8lz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:14Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.503210 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://722b96ad79d500044a30b065a6a5fa100a2388f7c296ae8858a3a83b881a1e0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://154995ba8ce124a48fc983de87e0fcbccc684c8bf36edf3b12565a113f82c8b5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T01:13:44Z\\\",\\\"message\\\":\\\"ices.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1124 01:13:44.947473 6420 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 01:13:44.947487 6420 services_controller.go:452] Built service openshift-kube-apiserver/apiserver per-node LB for network=default: []services.LB{}\\\\nI1124 01:13:44.947496 6420 services_controller.go:453] Built service openshift-kube-apiserver/apiserver template LB for network=default: []services.LB{}\\\\nI1124 01:13:44.947495 6420 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:73135118-cf1b-4568-bd31-2f50308bf69d}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1124 01:13:44.946799 6420 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4ngwk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:14Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.514372 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:14Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.524755 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:14Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.534781 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vzkz4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a85ee7-3417-491b-a375-99f140cfb5de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67684d7d9617d917019f391c4031994da8f4ff110a4fe3d77dfa1bf76378dec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tts5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vzkz4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:14Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.546230 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7wvfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a32df1d1-89b4-4a22-a07f-2d7ecd2e265b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f7432e293e8131ca1bbbc7281982a639b41e995fc2210e6351a0e87cf719700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2pjf7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff945c5b4cb1534cb8e31d7e471ebe2f47fe04ea46f09fc53223a72f4c48de5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2pjf7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:27Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7wvfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:14Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.557382 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12dbf42a-6ae3-49b0-9c9c-066537a38f57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://05b738b3ec74c0cc3ba0b2e527320b6c06aa4af4b91a2135f26b299a9dc7ea19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ccf87d0c9abfae5e7978f134c48167ee4e43a523b6d9ae7b9967259dbd1b968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2ccf87d0c9abfae5e7978f134c48167ee4e43a523b6d9ae7b9967259dbd1b968\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:14Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.568735 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"335c2304-248f-4c4e-8e4c-58ec5e76af4c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://288a11a39d941a7886acc9d7ee23935ca723a109dcb2b8f0743bb56e7d54a7e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://277775a9aa022595c0bd26119595cdf69b370e4851a05cab33b8ef4779923a78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beedc1a75d10e8bbe8f51adfca845b931ae288d59d89220ecc08bb65c425dcd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"star
tedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfc5ee83a76ce24ca8eab469ec28b319d9e69d9c7d8618f97a2b2a813022a97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfc5ee83a76ce24ca8eab469ec28b319d9e69d9c7d8618f97a2b2a813022a97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:14Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.584830 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.584871 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.584881 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.584899 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.584913 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:14Z","lastTransitionTime":"2025-11-24T01:14:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.585975 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4407e6a61e6a16826e5087eda8f3f2904933c9fb540f0a3c03e5414188248657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bcaff046f33f0df74919d9673b7e12e0609ea03f7686c1806ffb3c351965195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:14Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.596755 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://519ff5cd20c605ee21f4c1651c932618d593cb5ecf336f3504ebe1bd0f35adbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:14Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.607684 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1962128-02a0-46c3-82c2-5055c2aed0b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a249831bc7bdc4396bd5498c176a52332946f791c39b1f90eea4c157e61fc60c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16a2280b76aa63e33f859193da8353df1f8a4014ef51c0ef6350beb5fb217245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h8xzm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:14Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.619768 4755 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-8pm69" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19dbf7ff-f684-4c57-803a-83b39e0705a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://edbc9d9876663d11869ff6269682427495c7d205d739e946c354595876274685\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://090d3d275453f489254405c1197750888cc4d15d4251c6ce9b8a1a873319d5a4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T01:14:03Z\\\",\\\"message\\\":\\\"2025-11-24T01:13:17+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_29c906e4-4a05-4506-9910-8ca86f43fdc8\\\\n2025-11-24T01:13:17+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_29c906e4-4a05-4506-9910-8ca86f43fdc8 to /host/opt/cni/bin/\\\\n2025-11-24T01:13:18Z [verbose] multus-daemon started\\\\n2025-11-24T01:13:18Z [verbose] Readiness Indicator file check\\\\n2025-11-24T01:14:03Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6k7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8pm69\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:14Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.637591 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b438206-d27d-46e5-a2b5-91397c0d8856\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7575abb359776b9b306d821357a1147cadfd41c99d9528e644a3c1fa046d1df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11946902b549b820749fb6127935c82cc1e840cd84834d1803ffc03003c967d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34675c12ebbb549e0db1190d1d99a23ccef6e4a97b884bcdafdc629204238c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25ec710b1a2a3c639ced06a73586f11fbc05067
1bc059fdcaf49714f153445d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64cdda2f5b59ddeadc36986c875cf28e18a7ed1ce1822dff6002724f7e558215\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:14Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.648892 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69352793-3db7-450c-98c2-5fd2040bb5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c1c133fe773aac40181aea15bd49ad2aee2f08caed8f7436c81ae3fc310300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b22922ca0ffb3c9c410a6ed82822ce5b3436d0e44993abe1fcb678959f243808\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5845caccbc89d64a7aaabedeec11a5d75fb7f25eb806cf8dceaf15263f30b4f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a9cb7e9aa093857c03bb4267ff3386e04e4de6e4025fdbfcb628db800d62745\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:14Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.662767 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27983572-2d9c-43d6-a7f0-445a0aec0531\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://612fe8e80230000f941c6c568234f32233a32e831a6d75556d9af68a1b790a7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zb6qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:14Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.673126 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9cl8m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ccb86693-0b66-43ca-a2d1-e9594521d30f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl68c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl68c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:29Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9cl8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:14Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.686539 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.686577 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.686586 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.686600 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.686622 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:14Z","lastTransitionTime":"2025-11-24T01:14:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.788626 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.788662 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.788671 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.788687 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.788697 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:14Z","lastTransitionTime":"2025-11-24T01:14:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.890832 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.890871 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.890880 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.890892 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.890900 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:14Z","lastTransitionTime":"2025-11-24T01:14:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.994092 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.994144 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.994155 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.994179 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.994195 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:14Z","lastTransitionTime":"2025-11-24T01:14:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.996352 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:14:14 crc kubenswrapper[4755]: I1124 01:14:14.996355 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:14:14 crc kubenswrapper[4755]: E1124 01:14:14.996523 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:14:14 crc kubenswrapper[4755]: E1124 01:14:14.996729 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.096885 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.096930 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.096943 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.096958 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.096969 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:15Z","lastTransitionTime":"2025-11-24T01:14:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.199701 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.199778 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.199843 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.199875 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.199899 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:15Z","lastTransitionTime":"2025-11-24T01:14:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.303823 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.303867 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.303875 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.303896 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.303906 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:15Z","lastTransitionTime":"2025-11-24T01:14:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.407619 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.407652 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.407660 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.407673 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.407683 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:15Z","lastTransitionTime":"2025-11-24T01:14:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.429703 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4ngwk_b3b1d3cb-ffbd-4034-832d-6577ccf2f780/ovnkube-controller/3.log" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.430837 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4ngwk_b3b1d3cb-ffbd-4034-832d-6577ccf2f780/ovnkube-controller/2.log" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.435076 4755 generic.go:334] "Generic (PLEG): container finished" podID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerID="722b96ad79d500044a30b065a6a5fa100a2388f7c296ae8858a3a83b881a1e0d" exitCode=1 Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.435125 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" event={"ID":"b3b1d3cb-ffbd-4034-832d-6577ccf2f780","Type":"ContainerDied","Data":"722b96ad79d500044a30b065a6a5fa100a2388f7c296ae8858a3a83b881a1e0d"} Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.435179 4755 scope.go:117] "RemoveContainer" containerID="154995ba8ce124a48fc983de87e0fcbccc684c8bf36edf3b12565a113f82c8b5" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.436418 4755 scope.go:117] "RemoveContainer" containerID="722b96ad79d500044a30b065a6a5fa100a2388f7c296ae8858a3a83b881a1e0d" Nov 24 01:14:15 crc kubenswrapper[4755]: E1124 01:14:15.436688 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-4ngwk_openshift-ovn-kubernetes(b3b1d3cb-ffbd-4034-832d-6577ccf2f780)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.462001 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:15Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.482944 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:15Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.499113 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://519ff5cd20c605ee21f4c1651c932618d593cb5ecf336f3504ebe1bd0f35adbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:15Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.510521 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.510805 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.510882 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.510914 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.510936 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:15Z","lastTransitionTime":"2025-11-24T01:14:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.518494 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1962128-02a0-46c3-82c2-5055c2aed0b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a249831bc7bdc4396bd5498c176a52332946f791c39b1f90eea4c157e61fc60c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16a2280b76aa63e33f859193da8353df1f8a4014ef51c0ef6350beb5fb217245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running
\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h8xzm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:15Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.538960 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8pm69" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19dbf7ff-f684-4c57-803a-83b39e0705a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://edbc9d9876663d11869ff6269682427495c7d205d739e946c354595876274685\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://090d3d275453f489254405c1197750888cc4d15d4251c6ce9b8a1a873319d5a4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T01:14:03Z\\\",\\\"message\\\":\\\"2025-11-24T01:13:17+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_29c906e4-4a05-4506-9910-8ca86f43fdc8\\\\n2025-11-24T01:13:17+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_29c906e4-4a05-4506-9910-8ca86f43fdc8 to /host/opt/cni/bin/\\\\n2025-11-24T01:13:18Z [verbose] multus-daemon started\\\\n2025-11-24T01:13:18Z [verbose] Readiness Indicator file check\\\\n2025-11-24T01:14:03Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6k7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8pm69\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:15Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.556427 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vzkz4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a85ee7-3417-491b-a375-99f140cfb5de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67684d7d9617d917019f391c4031994da8f4ff110a4fe3d77dfa1bf76378dec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tts5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vzkz4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:15Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.571841 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7wvfc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a32df1d1-89b4-4a22-a07f-2d7ecd2e265b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f7432e293e8131ca1bbbc7281982a639b41e995fc2210e6351a0e87cf719700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2pjf7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff945c5b4cb1534cb8e31d7e471ebe2f47fe04ea46f09fc53223a72f4c48de5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2pjf7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:27Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7wvfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:15Z is after 2025-08-24T17:21:41Z" Nov 24 
01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.584681 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12dbf42a-6ae3-49b0-9c9c-066537a38f57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://05b738b3ec74c0cc3ba0b2e527320b6c06aa4af4b91a2135f26b299a9dc7ea19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ccf87d0c9abfae5e7978f134c48167ee4e43a523b6d9ae7b9967259dbd1b968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2ccf87d0c9abfae5e7978f134c48167ee4e43a523b6d9ae7b9967259dbd1b968\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:15Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.600848 4755 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"335c2304-248f-4c4e-8e4c-58ec5e76af4c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://288a11a39d941a7886acc9d7ee23935ca723a109dcb2b8f0743bb56e7d54a7e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://277775a9aa022595c0bd26119595cdf69b370e4851a05cab33b8ef4779923a78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beedc1a75d10e8bbe8f51adfca845b931ae288d59d89220ecc08bb65c425dcd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\
\\":\\\"cri-o://2cfc5ee83a76ce24ca8eab469ec28b319d9e69d9c7d8618f97a2b2a813022a97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfc5ee83a76ce24ca8eab469ec28b319d9e69d9c7d8618f97a2b2a813022a97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:15Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.613661 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.613690 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.613698 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.613710 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.613719 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:15Z","lastTransitionTime":"2025-11-24T01:14:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.615971 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4407e6a61e6a16826e5087eda8f3f2904933c9fb540f0a3c03e5414188248657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bcaff046f33f0df74919d9673b7e12e0609ea03f7686c1806ffb3c351965195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:15Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.629710 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9cl8m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ccb86693-0b66-43ca-a2d1-e9594521d30f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl68c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl68c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:29Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9cl8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:15Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.655070 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b438206-d27d-46e5-a2b5-91397c0d8856\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7575abb359776b9b306d821357a1147cadfd41c99d9528e644a3c1fa046d1df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11946902b549b820749fb6127935c82cc1e840cd84834d1803ffc03003c967d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34675c12ebbb549e0db1190d1d99a23ccef6e4a97b884bcdafdc629204238c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25ec710b1a2a3c639ced06a73586f11fbc05067
1bc059fdcaf49714f153445d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64cdda2f5b59ddeadc36986c875cf28e18a7ed1ce1822dff6002724f7e558215\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:15Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.668420 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69352793-3db7-450c-98c2-5fd2040bb5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c1c133fe773aac40181aea15bd49ad2aee2f08caed8f7436c81ae3fc310300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b22922ca0ffb3c9c410a6ed82822ce5b3436d0e44993abe1fcb678959f243808\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5845caccbc89d64a7aaabedeec11a5d75fb7f25eb806cf8dceaf15263f30b4f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a9cb7e9aa093857c03bb4267ff3386e04e4de6e4025fdbfcb628db800d62745\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:15Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.684573 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27983572-2d9c-43d6-a7f0-445a0aec0531\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://612fe8e80230000f941c6c568234f32233a32e831a6d75556d9af68a1b790a7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zb6qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:15Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.694998 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dt8lz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58ea3cf6-1f18-428e-9b3f-6064671faf72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63e4ef9d52542d28688b962a86087f6b95a40625eefb13ad0460e2e728f5048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fc2wq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dt8lz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:15Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.711444 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://722b96ad79d500044a30b065a6a5fa100a2388f7c296ae8858a3a83b881a1e0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://154995ba8ce124a48fc983de87e0fcbccc684c8bf36edf3b12565a113f82c8b5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T01:13:44Z\\\",\\\"message\\\":\\\"ices.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1124 01:13:44.947473 6420 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 01:13:44.947487 6420 services_controller.go:452] Built service openshift-kube-apiserver/apiserver per-node LB for network=default: []services.LB{}\\\\nI1124 01:13:44.947496 6420 services_controller.go:453] Built service openshift-kube-apiserver/apiserver template LB for network=default: []services.LB{}\\\\nI1124 01:13:44.947495 6420 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:73135118-cf1b-4568-bd31-2f50308bf69d}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1124 01:13:44.946799 6420 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://722b96ad79d500044a30b065a6a5fa100a2388f7c296ae8858a3a83b881a1e0d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T01:14:14Z\\\",\\\"message\\\":\\\" ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console/downloads\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-console/downloads_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console/downloads\\\\\\\"}, Opts:services.LBOpts{Reject:true, 
EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.213\\\\\\\", Port:80, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1124 01:14:14.816686 6794 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to sh\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\"
:\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4ngwk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:15Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.715311 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.715338 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.715347 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.715362 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.715371 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:15Z","lastTransitionTime":"2025-11-24T01:14:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.727240 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"103d5a7c-0ec9-4c03-9f86-03dc3e01665c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40df6848b15d905bccead2547cc1dec836a69e38e80cb65de5dbee812f0081cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89d05f3773b4861d3bb33d7e6a6f09baaedb7510ed0b15703495110923a2f790\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78a49336f91d7c8aeb0d2861d1228d7b8eb478290bdef3b0662e26f9e1cddd57\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cbe55a8cc684f932438f20f513d01400266c1c6ee3ded9c9ae1ea0b92aec7af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T01:13:12Z\\\",\\\"message\\\":\\\"GCM_SHA384' detected.\\\\nW1124 01:13:12.041119 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 01:13:12.041123 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 01:13:12.046149 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046144 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046262 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046271 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1124 01:13:12.046381 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1124 01:13:12.046396 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1124 01:13:12.046536 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\"\\\\nI1124 01:13:12.046556 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1763946791\\\\\\\\\\\\\\\" (2025-11-24 01:13:10 +0000 UTC to 2025-12-24 01:13:11 +0000 UTC (now=2025-11-24 01:13:12.046510889 +0000 UTC))\\\\\\\"\\\\nF1124 01:13:12.046649 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcfcdeaa4c2ae4a6f4bf2d79a64607361b9798549e8910a7374e4e3d17a16ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:15Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.739112 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://311d898b91dec73c482063aca5a79fefbc55b705d41d4da5fbbc21d8972deac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:15Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.750440 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:15Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.822104 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.822146 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.822154 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.822169 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.822178 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:15Z","lastTransitionTime":"2025-11-24T01:14:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.925951 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.926001 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.926018 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.926040 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.926056 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:15Z","lastTransitionTime":"2025-11-24T01:14:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.995865 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:14:15 crc kubenswrapper[4755]: E1124 01:14:15.996079 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:14:15 crc kubenswrapper[4755]: I1124 01:14:15.996242 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:14:15 crc kubenswrapper[4755]: E1124 01:14:15.996484 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.011588 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:16Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.029316 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.029368 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.029383 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.029406 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.029425 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:16Z","lastTransitionTime":"2025-11-24T01:14:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.031189 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:16Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.051127 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://519ff5cd20c605ee21f4c1651c932618d593cb5ecf336f3504ebe1bd0f35adbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:16Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.064434 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1962128-02a0-46c3-82c2-5055c2aed0b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a249831bc7bdc4396bd5498c176a52332946f791c39b1f90eea4c157e61fc60c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16a2280b76aa63e33f859193da8353df1f8a4014ef51c0ef6350beb5fb217245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h8xzm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:16Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.083083 4755 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-8pm69" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19dbf7ff-f684-4c57-803a-83b39e0705a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://edbc9d9876663d11869ff6269682427495c7d205d739e946c354595876274685\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://090d3d275453f489254405c1197750888cc4d15d4251c6ce9b8a1a873319d5a4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T01:14:03Z\\\",\\\"message\\\":\\\"2025-11-24T01:13:17+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_29c906e4-4a05-4506-9910-8ca86f43fdc8\\\\n2025-11-24T01:13:17+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_29c906e4-4a05-4506-9910-8ca86f43fdc8 to /host/opt/cni/bin/\\\\n2025-11-24T01:13:18Z [verbose] multus-daemon started\\\\n2025-11-24T01:13:18Z [verbose] Readiness Indicator file check\\\\n2025-11-24T01:14:03Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6k7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8pm69\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:16Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.093771 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vzkz4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a85ee7-3417-491b-a375-99f140cfb5de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67684d7d9617d917019f391c4031994da8f4ff110a4fe3d77dfa1bf76378dec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tts5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vzkz4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:16Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.108058 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7wvfc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a32df1d1-89b4-4a22-a07f-2d7ecd2e265b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f7432e293e8131ca1bbbc7281982a639b41e995fc2210e6351a0e87cf719700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2pjf7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff945c5b4cb1534cb8e31d7e471ebe2f47fe04ea46f09fc53223a72f4c48de5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2pjf7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:27Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7wvfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:16Z is after 2025-08-24T17:21:41Z" Nov 24 
01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.122054 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12dbf42a-6ae3-49b0-9c9c-066537a38f57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://05b738b3ec74c0cc3ba0b2e527320b6c06aa4af4b91a2135f26b299a9dc7ea19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ccf87d0c9abfae5e7978f134c48167ee4e43a523b6d9ae7b9967259dbd1b968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2ccf87d0c9abfae5e7978f134c48167ee4e43a523b6d9ae7b9967259dbd1b968\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:16Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.131086 4755 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.131135 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.131153 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.131176 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.131193 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:16Z","lastTransitionTime":"2025-11-24T01:14:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.145322 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"335c2304-248f-4c4e-8e4c-58ec5e76af4c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://288a11a39d941a7886acc9d7ee23935ca723a109dcb2b8f0743bb56e7d54a7e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://277775a9aa022595c0bd26119595cdf69b370e4851a05cab33b8ef4779923a78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T0
1:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beedc1a75d10e8bbe8f51adfca845b931ae288d59d89220ecc08bb65c425dcd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfc5ee83a76ce24ca8eab469ec28b319d9e69d9c7d8618f97a2b2a813022a97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfc5ee83a76ce24ca8eab469ec28b319d9e69d9c7d8618f97a2b2a813022a97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:16Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.158867 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4407e6a61e6a16826e5087eda8f3f2904933c9fb540f0a3c03e5414188248657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bcaff046f33f0df74919d9673b7e12e0609ea03f7686c1806ffb3c351965195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:16Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.231672 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9cl8m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ccb86693-0b66-43ca-a2d1-e9594521d30f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl68c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl68c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:29Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9cl8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:16Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.240363 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.240620 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.240684 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.240750 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.240808 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:16Z","lastTransitionTime":"2025-11-24T01:14:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.248423 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b438206-d27d-46e5-a2b5-91397c0d8856\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7575abb359776b9b306d821357a1147cadfd41c99d9528e644a3c1fa046d1df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11946902b549b820749fb6127935c82cc1e840cd84834d1803ffc03003c967d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\
\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34675c12ebbb549e0db1190d1d99a23ccef6e4a97b884bcdafdc629204238c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25ec710b1a2a3c639ced06a73586f11fbc050671bc059fdcaf49714f153445d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64cdda2f5b59ddeadc36986c875cf28e18a7ed1ce1822dff6002724f7e558215\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":
\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:16Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.261724 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"69352793-3db7-450c-98c2-5fd2040bb5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c1c133fe773aac40181aea15bd49ad2aee2f08caed8f7436c81ae3fc310300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b22922ca0ffb3c9c410a6ed82822ce5b3436d0e44993abe1fcb678959f243808\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5845caccbc89d64a7aaabedeec11a5d75fb7f25eb806cf8dceaf15263f30b4f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a9cb7e9aa093857c03bb4267ff3386e04e4de6e4025fdbfcb628db800d62745\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:16Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.276283 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27983572-2d9c-43d6-a7f0-445a0aec0531\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://612fe8e80230000f941c6c568234f32233a32e831a6d75556d9af68a1b790a7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e878
2bb4061601c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-b
inary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termin
ated\\\":{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zb6qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:16Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.286421 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dt8lz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58ea3cf6-1f18-428e-9b3f-6064671faf72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63e4ef9d52542d28688b962a86087f6b95a40625eefb13ad0460e2e728f5048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fc2wq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dt8lz\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:16Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.309060 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,
\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\
\\"cri-o://c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://722b96ad79d500044a30b065a6a5fa100a2388f7c296ae8858a3a83b881a1e0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://154995ba8ce124a48fc983de87e0fcbccc684c8bf36edf3b12565a113f82c8b5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T01:13:44Z\\\",\\\"message\\\":\\\"ices.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1124 01:13:44.947473 6420 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1124 01:13:44.947487 6420 services_controller.go:452] Built service openshift-kube-apiserver/apiserver per-node LB for network=default: []services.LB{}\\\\nI1124 01:13:44.947496 6420 services_controller.go:453] Built service openshift-kube-apiserver/apiserver template LB for network=default: []services.LB{}\\\\nI1124 01:13:44.947495 6420 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:73135118-cf1b-4568-bd31-2f50308bf69d}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1124 01:13:44.946799 6420 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network 
controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://722b96ad79d500044a30b065a6a5fa100a2388f7c296ae8858a3a83b881a1e0d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T01:14:14Z\\\",\\\"message\\\":\\\" ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console/downloads\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-console/downloads_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console/downloads\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.213\\\\\\\", Port:80, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1124 01:14:14.816686 6794 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to 
sh\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4ngwk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:16Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.324334 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"103d5a7c-0ec9-4c03-9f86-03dc3e01665c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40df6848b15d905bccead2547cc1dec836a69e38e80cb65de5dbee812f0081cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89d05f3773b4861d3bb33d7e6a6f09baaedb7510ed0b15703495110923a2f790\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-clust
er-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78a49336f91d7c8aeb0d2861d1228d7b8eb478290bdef3b0662e26f9e1cddd57\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cbe55a8cc684f932438f20f513d01400266c1c6ee3ded9c9ae1ea0b92aec7af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T01:13:12Z\\\",\\\"message\\\":\\\"GCM_SHA384' detected.\\\\nW1124 01:13:12.041119 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 01:13:12.041123 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 01:13:12.046149 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046144 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046262 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046271 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1124 01:13:12.046381 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1124 01:13:12.046396 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" 
feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1124 01:13:12.046536 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\"\\\\nI1124 01:13:12.046556 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1763946791\\\\\\\\\\\\\\\" (2025-11-24 01:13:10 +0000 UTC to 2025-12-24 01:13:11 +0000 UTC (now=2025-11-24 01:13:12.046510889 +0000 UTC))\\\\\\\"\\\\nF1124 01:13:12.046649 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcfcdeaa4c2ae4a6f4bf2d79a64607361b9798549e8910a7374e4e3d17a16ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:16Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:16 
crc kubenswrapper[4755]: I1124 01:14:16.336262 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://311d898b91dec73c482063aca5a79fefbc55b705d41d4da5fbbc21d8972deac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:16Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.336588 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.336641 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.336650 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.336664 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.336673 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:16Z","lastTransitionTime":"2025-11-24T01:14:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:16 crc kubenswrapper[4755]: E1124 01:14:16.347946 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:14:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:14:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:14:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:14:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"50bb41a3-bb20-461c-ba4c-72998ece87bc\\\",\\\"systemUUID\\\":\\\"cb6dbfa2-ee9a-4406-af65-1558b4c6cb25\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:16Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.349185 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:16Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.350656 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.350688 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.350700 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.350714 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.350722 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:16Z","lastTransitionTime":"2025-11-24T01:14:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:16 crc kubenswrapper[4755]: E1124 01:14:16.361589 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:14:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:14:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:14:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:14:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"50bb41a3-bb20-461c-ba4c-72998ece87bc\\\",\\\"systemUUID\\\":\\\"cb6dbfa2-ee9a-4406-af65-1558b4c6cb25\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:16Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.364534 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.364573 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.364584 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.364615 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.364627 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:16Z","lastTransitionTime":"2025-11-24T01:14:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:16 crc kubenswrapper[4755]: E1124 01:14:16.375944 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:14:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:14:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:14:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:14:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"50bb41a3-bb20-461c-ba4c-72998ece87bc\\\",\\\"systemUUID\\\":\\\"cb6dbfa2-ee9a-4406-af65-1558b4c6cb25\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:16Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.379811 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.379864 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.379883 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.379909 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.379927 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:16Z","lastTransitionTime":"2025-11-24T01:14:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:16 crc kubenswrapper[4755]: E1124 01:14:16.394742 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:14:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:14:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:14:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:14:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"50bb41a3-bb20-461c-ba4c-72998ece87bc\\\",\\\"systemUUID\\\":\\\"cb6dbfa2-ee9a-4406-af65-1558b4c6cb25\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:16Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.398954 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.398996 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.399014 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.399035 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.399050 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:16Z","lastTransitionTime":"2025-11-24T01:14:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:16 crc kubenswrapper[4755]: E1124 01:14:16.412538 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:14:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:14:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:14:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T01:14:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"50bb41a3-bb20-461c-ba4c-72998ece87bc\\\",\\\"systemUUID\\\":\\\"cb6dbfa2-ee9a-4406-af65-1558b4c6cb25\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:16Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:16 crc kubenswrapper[4755]: E1124 01:14:16.413226 4755 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.414941 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.415105 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.415210 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.415293 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.415410 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:16Z","lastTransitionTime":"2025-11-24T01:14:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.440916 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4ngwk_b3b1d3cb-ffbd-4034-832d-6577ccf2f780/ovnkube-controller/3.log" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.445349 4755 scope.go:117] "RemoveContainer" containerID="722b96ad79d500044a30b065a6a5fa100a2388f7c296ae8858a3a83b881a1e0d" Nov 24 01:14:16 crc kubenswrapper[4755]: E1124 01:14:16.445518 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-4ngwk_openshift-ovn-kubernetes(b3b1d3cb-ffbd-4034-832d-6577ccf2f780)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.459295 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:16Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.470533 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:16Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.482812 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4407e6a61e6a16826e5087eda8f3f2904933c9fb540f0a3c03e5414188248657\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bcaff046f33f0df74919d9673b7e12e0609ea03f7686c1806ffb3c351965195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:16Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.493796 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://519ff5cd20c605ee21f4c1651c932618d593cb5ecf336f3504ebe1bd0f35adbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:16Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.504890 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1962128-02a0-46c3-82c2-5055c2aed0b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a249831bc7bdc4396bd5498c176a52332946f791c39b1f90eea4c157e61fc60c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16a2280b76aa63e33f859193da8353df1f8a4014ef51c0ef6350beb5fb217245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hcgb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h8xzm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:16Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.517530 4755 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.517550 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.517558 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.517569 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.517578 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:16Z","lastTransitionTime":"2025-11-24T01:14:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.518196 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8pm69" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19dbf7ff-f684-4c57-803a-83b39e0705a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://edbc9d9876663d11869ff6269682427495c7d205d739e946c354595876274685\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://090d3d275453f489254405c1197750888cc4d15d4251c6ce9b8a1a873319d5a4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T01:14:03Z\\\",\\\"message\\\":\\\"2025-11-24T01:13:17+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_29c906e4-4a05-4506-9910-8ca86f43fdc8\\\\n2025-11-24T01:13:17+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_29c906e4-4a05-4506-9910-8ca86f43fdc8 to /host/opt/cni/bin/\\\\n2025-11-24T01:13:18Z [verbose] multus-daemon started\\\\n2025-11-24T01:13:18Z [verbose] Readiness Indicator file check\\\\n2025-11-24T01:14:03Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c6k7s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8pm69\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:16Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.526618 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vzkz4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a85ee7-3417-491b-a375-99f140cfb5de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67684d7d9617d917019f391c4031994da8f4ff110a4fe3d77dfa1bf76378dec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tts5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vzkz4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:16Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.537122 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7wvfc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a32df1d1-89b4-4a22-a07f-2d7ecd2e265b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f7432e293e8131ca1bbbc7281982a639b41e995fc2210e6351a0e87cf719700\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2pjf7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff945c5b4cb1534cb8e31d7e471ebe2f47fe04ea46f09fc53223a72f4c48de5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2pjf7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:27Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7wvfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:16Z is after 2025-08-24T17:21:41Z" Nov 24 
01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.545992 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12dbf42a-6ae3-49b0-9c9c-066537a38f57\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://05b738b3ec74c0cc3ba0b2e527320b6c06aa4af4b91a2135f26b299a9dc7ea19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ccf87d0c9abfae5e7978f134c48167ee4e43a523b6d9ae7b9967259dbd1b968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2ccf87d0c9abfae5e7978f134c48167ee4e43a523b6d9ae7b9967259dbd1b968\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:16Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.556820 4755 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"335c2304-248f-4c4e-8e4c-58ec5e76af4c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://288a11a39d941a7886acc9d7ee23935ca723a109dcb2b8f0743bb56e7d54a7e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://277775a9aa022595c0bd26119595cdf69b370e4851a05cab33b8ef4779923a78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beedc1a75d10e8bbe8f51adfca845b931ae288d59d89220ecc08bb65c425dcd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\
\\":\\\"cri-o://2cfc5ee83a76ce24ca8eab469ec28b319d9e69d9c7d8618f97a2b2a813022a97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfc5ee83a76ce24ca8eab469ec28b319d9e69d9c7d8618f97a2b2a813022a97\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:16Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.569878 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27983572-2d9c-43d6-a7f0-445a0aec0531\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://612fe8e80230000f941c6c568234f32233a32e831a6d75556d9af68a1b790a7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",
\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48cf7cff331d1b4d58c399805b529532340c0df6d43959b7e8782bb4061601c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b0f6076ed545d4dabca11992b4c4e8c166fbe6539ea16e84f86f810805417f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c2a6eaa6602d05cbe843509671986abddb2b069bdce511e82fee52f19d61966e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{
\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd00a9be6417060d6fcf96baafa869bd241212409aafbb9cc51b1dc9f79fb38b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://372123a18102ee49379a4c4ffa8492eb65fa054b18dfd12e0a281d3cbad3f7b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"con
tainerID\\\":\\\"cri-o://56a8ddef30c6e3f9c1869689e55fdff8f5e6a8e29d85648329927e7503829fac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgwd7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zb6qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:16Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.578262 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9cl8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ccb86693-0b66-43ca-a2d1-e9594521d30f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl68c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wl68c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:29Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9cl8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:16Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.598711 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b438206-d27d-46e5-a2b5-91397c0d8856\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7575abb359776b9b306d821357a1147cadfd41c99d9528e644a3c1fa046d1df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11946902b549b820749fb6127935c82cc1e840cd84834d1803ffc03003c967d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34675c12ebbb549e0db1190d1d99a23ccef6e4a97b884bcdafdc629204238c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25ec710b1a2a3c639ced06a73586f11fbc05067
1bc059fdcaf49714f153445d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64cdda2f5b59ddeadc36986c875cf28e18a7ed1ce1822dff6002724f7e558215\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://779e7c1b2fcb95b109f265debd939c3d61ff074ac5774070056e62d13672bb47\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92c3a2ebd58d2daa4d72c12152c8603b049b22ef4b0c86b41d8c300a0c78735a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9a2d9971aadb0d0ea373c9969e8f605c231f05d90d4ad5ab037e7c878d71aba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:16Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.610516 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69352793-3db7-450c-98c2-5fd2040bb5c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99c1c133fe773aac40181aea15bd49ad2aee2f08caed8f7436c81ae3fc310300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b22922ca0ffb3c9c410a6ed82822ce5b3436d0e44993abe1fcb678959f243808\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5845caccbc89d64a7aaabedeec11a5d75fb7f25eb806cf8dceaf15263f30b4f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a9cb7e9aa093857c03bb4267ff3386e04e4de6e4025fdbfcb628db800d62745\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:16Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.620122 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.620187 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.620201 4755 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.620219 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.620229 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:16Z","lastTransitionTime":"2025-11-24T01:14:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.621790 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:16Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.630759 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dt8lz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58ea3cf6-1f18-428e-9b3f-6064671faf72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63e4ef9d52542d28688b962a86087f6b95a40625eefb13ad0460e2e728f5048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fc2wq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dt8lz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:16Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.651836 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://722b96ad79d500044a30b065a6a5fa100a2388f7c296ae8858a3a83b881a1e0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://722b96ad79d500044a30b065a6a5fa100a2388f7c296ae8858a3a83b881a1e0d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T01:14:14Z\\\",\\\"message\\\":\\\" ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console/downloads\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-console/downloads_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console/downloads\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.213\\\\\\\", Port:80, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1124 01:14:14.816686 6794 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} 
was not added to sh\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:14:14Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-4ngwk_openshift-ovn-kubernetes(b3b1d3cb-ffbd-4034-832d-6577ccf2f780)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":tr
ue,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:13:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5nhqm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:13:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4ngwk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:16Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.666182 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"103d5a7c-0ec9-4c03-9f86-03dc3e01665c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T01:12:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40df6848b15d905bccead2547cc1dec836a69e38e80cb65de5dbee812f0081cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89d05f3773b4861d3bb33d7e6a6f09baaedb7510ed0b15703495110923a2f790\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78a49336f91d7c8aeb0d2861d1228d7b8eb478290bdef3b0662e26f9e1cddd57\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5cbe55a8cc684f932438f20f513d01400266c1c6ee3ded9c9ae1ea0b92aec7af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://464ba5932bcc6cd336f3b28a7451b718b415b48d21dd80edcbded6774fa8737a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T01:13:12Z\\\",\\\"message\\\":\\\"GCM_SHA384' detected.\\\\nW1124 01:13:12.041119 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1124 01:13:12.041123 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1124 01:13:12.046149 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046144 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1124 01:13:12.046262 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1124 01:13:12.046271 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1124 01:13:12.046191 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1124 01:13:12.046381 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1124 01:13:12.046396 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1124 01:13:12.046536 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\"\\\\nI1124 01:13:12.046556 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2431684917/tls.crt::/tmp/serving-cert-2431684917/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1763946791\\\\\\\\\\\\\\\" (2025-11-24 01:13:10 +0000 UTC to 2025-12-24 01:13:11 +0000 UTC (now=2025-11-24 01:13:12.046510889 +0000 UTC))\\\\\\\"\\\\nF1124 01:13:12.046649 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T01:13:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcfcdeaa4c2ae4a6f4bf2d79a64607361b9798549e8910a7374e4e3d17a16ae\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:12:58Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e678d47c4fee571703aa3f91d93aa4024ef1f078997f83f26bbae9cb0de00dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T01:12:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T01:12:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T01:12:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:16Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.677949 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T01:13:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://311d898b91dec73c482063aca5a79fefbc55b705d41d4da5fbbc21d8972deac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T01:13:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T01:14:16Z is after 2025-08-24T17:21:41Z" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.727032 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.727101 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.727125 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.727153 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.727173 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:16Z","lastTransitionTime":"2025-11-24T01:14:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.830393 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.830442 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.830458 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.830480 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.830499 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:16Z","lastTransitionTime":"2025-11-24T01:14:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.934042 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.934101 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.934120 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.934146 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.934163 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:16Z","lastTransitionTime":"2025-11-24T01:14:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.995701 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:14:16 crc kubenswrapper[4755]: I1124 01:14:16.995701 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:14:16 crc kubenswrapper[4755]: E1124 01:14:16.995985 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:14:16 crc kubenswrapper[4755]: E1124 01:14:16.996144 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.037093 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.037152 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.037176 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.037203 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.037225 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:17Z","lastTransitionTime":"2025-11-24T01:14:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.140788 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.140926 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.140944 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.140982 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.141019 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:17Z","lastTransitionTime":"2025-11-24T01:14:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.243574 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.243710 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.243733 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.243761 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.243777 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:17Z","lastTransitionTime":"2025-11-24T01:14:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.346157 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.346201 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.346213 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.346230 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.346242 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:17Z","lastTransitionTime":"2025-11-24T01:14:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.449446 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.449519 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.449529 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.449545 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.449555 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:17Z","lastTransitionTime":"2025-11-24T01:14:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.553377 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.553426 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.553444 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.553467 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.553484 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:17Z","lastTransitionTime":"2025-11-24T01:14:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.658204 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.658252 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.658266 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.658287 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.658302 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:17Z","lastTransitionTime":"2025-11-24T01:14:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.760996 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.761050 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.761066 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.761090 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.761108 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:17Z","lastTransitionTime":"2025-11-24T01:14:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.864522 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.864571 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.864584 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.864638 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.864657 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:17Z","lastTransitionTime":"2025-11-24T01:14:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.967987 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.968064 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.968089 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.968116 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.968137 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:17Z","lastTransitionTime":"2025-11-24T01:14:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.995877 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:14:17 crc kubenswrapper[4755]: I1124 01:14:17.995921 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:14:17 crc kubenswrapper[4755]: E1124 01:14:17.996045 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:14:17 crc kubenswrapper[4755]: E1124 01:14:17.996124 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.070692 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.070797 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.070816 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.070839 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.070856 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:18Z","lastTransitionTime":"2025-11-24T01:14:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.174310 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.174354 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.174367 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.174387 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.174402 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:18Z","lastTransitionTime":"2025-11-24T01:14:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.277628 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.277672 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.277710 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.277730 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.277742 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:18Z","lastTransitionTime":"2025-11-24T01:14:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.381133 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.381201 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.381220 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.381244 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.381265 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:18Z","lastTransitionTime":"2025-11-24T01:14:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.484566 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.484690 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.484718 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.484745 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.484767 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:18Z","lastTransitionTime":"2025-11-24T01:14:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.587038 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.587089 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.587103 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.587120 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.587135 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:18Z","lastTransitionTime":"2025-11-24T01:14:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.689585 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.689740 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.689764 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.689793 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.689818 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:18Z","lastTransitionTime":"2025-11-24T01:14:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.793225 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.793314 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.793347 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.793376 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.793397 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:18Z","lastTransitionTime":"2025-11-24T01:14:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.895326 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.895364 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.895377 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.895398 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.895413 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:18Z","lastTransitionTime":"2025-11-24T01:14:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.995965 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.996034 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:14:18 crc kubenswrapper[4755]: E1124 01:14:18.996088 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:14:18 crc kubenswrapper[4755]: E1124 01:14:18.996347 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.997580 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.997632 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.997647 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.997664 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:18 crc kubenswrapper[4755]: I1124 01:14:18.997680 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:18Z","lastTransitionTime":"2025-11-24T01:14:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.100495 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.100538 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.100551 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.100569 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.100581 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:19Z","lastTransitionTime":"2025-11-24T01:14:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.203216 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.203289 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.203306 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.203331 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.203350 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:19Z","lastTransitionTime":"2025-11-24T01:14:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.306155 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.306222 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.306242 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.306267 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.306284 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:19Z","lastTransitionTime":"2025-11-24T01:14:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.409530 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.409572 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.409587 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.409616 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.409625 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:19Z","lastTransitionTime":"2025-11-24T01:14:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.512827 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.512888 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.512904 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.512928 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.512944 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:19Z","lastTransitionTime":"2025-11-24T01:14:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.615948 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.616008 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.616025 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.616050 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.616066 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:19Z","lastTransitionTime":"2025-11-24T01:14:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.718620 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.718665 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.718683 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.718705 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.718718 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:19Z","lastTransitionTime":"2025-11-24T01:14:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.796323 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.796409 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:14:19 crc kubenswrapper[4755]: E1124 01:14:19.796640 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 01:14:19 crc kubenswrapper[4755]: E1124 01:14:19.796672 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 24 01:14:19 crc kubenswrapper[4755]: E1124 01:14:19.796670 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 01:14:19 crc kubenswrapper[4755]: E1124 01:14:19.796693 4755 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 01:14:19 crc kubenswrapper[4755]: E1124 01:14:19.796726 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 24 01:14:19 crc kubenswrapper[4755]: E1124 01:14:19.796753 4755 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 01:14:19 crc kubenswrapper[4755]: E1124 01:14:19.796805 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-24 01:15:23.796775734 +0000 UTC m=+148.482841275 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 01:14:19 crc kubenswrapper[4755]: E1124 01:14:19.796847 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-24 01:15:23.796828756 +0000 UTC m=+148.482894307 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.822057 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.822155 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.822173 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.822197 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.822219 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:19Z","lastTransitionTime":"2025-11-24T01:14:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.897162 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.897354 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.897402 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:14:19 crc kubenswrapper[4755]: E1124 01:14:19.897545 4755 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 01:14:19 crc kubenswrapper[4755]: E1124 01:14:19.897680 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-24 01:15:23.897650342 +0000 UTC m=+148.583715873 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 01:14:19 crc kubenswrapper[4755]: E1124 01:14:19.897944 4755 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 01:14:19 crc kubenswrapper[4755]: E1124 01:14:19.898042 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:15:23.898005122 +0000 UTC m=+148.584070633 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:14:19 crc kubenswrapper[4755]: E1124 01:14:19.898129 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-24 01:15:23.898108565 +0000 UTC m=+148.584174136 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.926250 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.926321 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.926342 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.926379 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.926403 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:19Z","lastTransitionTime":"2025-11-24T01:14:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.996240 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:14:19 crc kubenswrapper[4755]: I1124 01:14:19.996343 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:14:19 crc kubenswrapper[4755]: E1124 01:14:19.996392 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:14:19 crc kubenswrapper[4755]: E1124 01:14:19.996539 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.028797 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.028863 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.028880 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.028905 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.028924 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:20Z","lastTransitionTime":"2025-11-24T01:14:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.132033 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.132085 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.132097 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.132118 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.132136 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:20Z","lastTransitionTime":"2025-11-24T01:14:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.234949 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.235035 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.235077 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.235110 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.235130 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:20Z","lastTransitionTime":"2025-11-24T01:14:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.338570 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.338662 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.338674 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.338692 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.338703 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:20Z","lastTransitionTime":"2025-11-24T01:14:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.440854 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.440897 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.440909 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.440925 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.440936 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:20Z","lastTransitionTime":"2025-11-24T01:14:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.544375 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.544430 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.544447 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.544470 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.544486 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:20Z","lastTransitionTime":"2025-11-24T01:14:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.647409 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.647508 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.647531 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.647560 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.647583 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:20Z","lastTransitionTime":"2025-11-24T01:14:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.750922 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.751014 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.751037 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.751066 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.751088 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:20Z","lastTransitionTime":"2025-11-24T01:14:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.854346 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.854408 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.854420 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.854438 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.854450 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:20Z","lastTransitionTime":"2025-11-24T01:14:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.957854 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.958331 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.958350 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.958376 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.958395 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:20Z","lastTransitionTime":"2025-11-24T01:14:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.996660 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:14:20 crc kubenswrapper[4755]: I1124 01:14:20.996725 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:14:20 crc kubenswrapper[4755]: E1124 01:14:20.996903 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:14:20 crc kubenswrapper[4755]: E1124 01:14:20.997094 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.060968 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.061011 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.061022 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.061042 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.061066 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:21Z","lastTransitionTime":"2025-11-24T01:14:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.164226 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.164280 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.164296 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.164316 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.164330 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:21Z","lastTransitionTime":"2025-11-24T01:14:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.266376 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.266422 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.266432 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.266447 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.266457 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:21Z","lastTransitionTime":"2025-11-24T01:14:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.368805 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.368836 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.368845 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.368858 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.368867 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:21Z","lastTransitionTime":"2025-11-24T01:14:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.471375 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.471425 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.471436 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.471460 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.471472 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:21Z","lastTransitionTime":"2025-11-24T01:14:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.574291 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.574379 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.574404 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.574441 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.574460 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:21Z","lastTransitionTime":"2025-11-24T01:14:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.677948 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.678027 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.678053 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.678084 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.678107 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:21Z","lastTransitionTime":"2025-11-24T01:14:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.781254 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.781318 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.781335 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.781359 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.781375 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:21Z","lastTransitionTime":"2025-11-24T01:14:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.884919 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.884998 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.885016 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.885040 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.885058 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:21Z","lastTransitionTime":"2025-11-24T01:14:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.987462 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.987553 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.987571 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.987596 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.987685 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:21Z","lastTransitionTime":"2025-11-24T01:14:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.996579 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:14:21 crc kubenswrapper[4755]: E1124 01:14:21.996742 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:14:21 crc kubenswrapper[4755]: I1124 01:14:21.997968 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:14:21 crc kubenswrapper[4755]: E1124 01:14:21.998082 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:14:22 crc kubenswrapper[4755]: I1124 01:14:22.090915 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:22 crc kubenswrapper[4755]: I1124 01:14:22.091070 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:22 crc kubenswrapper[4755]: I1124 01:14:22.091095 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:22 crc kubenswrapper[4755]: I1124 01:14:22.091122 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:22 crc kubenswrapper[4755]: I1124 01:14:22.091139 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:22Z","lastTransitionTime":"2025-11-24T01:14:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:22 crc kubenswrapper[4755]: I1124 01:14:22.193153 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:22 crc kubenswrapper[4755]: I1124 01:14:22.193184 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:22 crc kubenswrapper[4755]: I1124 01:14:22.193192 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:22 crc kubenswrapper[4755]: I1124 01:14:22.193208 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:22 crc kubenswrapper[4755]: I1124 01:14:22.193227 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:22Z","lastTransitionTime":"2025-11-24T01:14:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:22 crc kubenswrapper[4755]: I1124 01:14:22.296526 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:22 crc kubenswrapper[4755]: I1124 01:14:22.296591 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:22 crc kubenswrapper[4755]: I1124 01:14:22.296658 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:22 crc kubenswrapper[4755]: I1124 01:14:22.296685 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:22 crc kubenswrapper[4755]: I1124 01:14:22.296703 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:22Z","lastTransitionTime":"2025-11-24T01:14:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:22 crc kubenswrapper[4755]: I1124 01:14:22.399591 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:22 crc kubenswrapper[4755]: I1124 01:14:22.399732 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:22 crc kubenswrapper[4755]: I1124 01:14:22.399750 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:22 crc kubenswrapper[4755]: I1124 01:14:22.399772 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:22 crc kubenswrapper[4755]: I1124 01:14:22.399789 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:22Z","lastTransitionTime":"2025-11-24T01:14:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:22 crc kubenswrapper[4755]: I1124 01:14:22.502639 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:22 crc kubenswrapper[4755]: I1124 01:14:22.502685 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:22 crc kubenswrapper[4755]: I1124 01:14:22.502697 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:22 crc kubenswrapper[4755]: I1124 01:14:22.502726 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:22 crc kubenswrapper[4755]: I1124 01:14:22.502740 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:22Z","lastTransitionTime":"2025-11-24T01:14:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:22 crc kubenswrapper[4755]: I1124 01:14:22.605334 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:22 crc kubenswrapper[4755]: I1124 01:14:22.605377 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:22 crc kubenswrapper[4755]: I1124 01:14:22.605387 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:22 crc kubenswrapper[4755]: I1124 01:14:22.605401 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:22 crc kubenswrapper[4755]: I1124 01:14:22.605412 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:22Z","lastTransitionTime":"2025-11-24T01:14:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:22 crc kubenswrapper[4755]: I1124 01:14:22.708305 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:22 crc kubenswrapper[4755]: I1124 01:14:22.708949 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:22 crc kubenswrapper[4755]: I1124 01:14:22.709157 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:22 crc kubenswrapper[4755]: I1124 01:14:22.709327 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:22 crc kubenswrapper[4755]: I1124 01:14:22.709497 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:22Z","lastTransitionTime":"2025-11-24T01:14:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:22 crc kubenswrapper[4755]: I1124 01:14:22.813725 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:22 crc kubenswrapper[4755]: I1124 01:14:22.814543 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:22 crc kubenswrapper[4755]: I1124 01:14:22.814725 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:22 crc kubenswrapper[4755]: I1124 01:14:22.814899 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:22 crc kubenswrapper[4755]: I1124 01:14:22.815034 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:22Z","lastTransitionTime":"2025-11-24T01:14:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:22 crc kubenswrapper[4755]: I1124 01:14:22.918881 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:22 crc kubenswrapper[4755]: I1124 01:14:22.918960 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:22 crc kubenswrapper[4755]: I1124 01:14:22.918983 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:22 crc kubenswrapper[4755]: I1124 01:14:22.919011 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:22 crc kubenswrapper[4755]: I1124 01:14:22.919031 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:22Z","lastTransitionTime":"2025-11-24T01:14:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:22 crc kubenswrapper[4755]: I1124 01:14:22.995883 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:14:22 crc kubenswrapper[4755]: E1124 01:14:22.996066 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:14:22 crc kubenswrapper[4755]: I1124 01:14:22.995915 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:14:22 crc kubenswrapper[4755]: E1124 01:14:22.996234 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.022578 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.022688 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.022707 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.022731 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.022754 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:23Z","lastTransitionTime":"2025-11-24T01:14:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.125967 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.126238 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.126261 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.126291 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.126314 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:23Z","lastTransitionTime":"2025-11-24T01:14:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.228442 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.228523 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.228545 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.228570 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.228589 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:23Z","lastTransitionTime":"2025-11-24T01:14:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.332143 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.332215 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.332227 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.332271 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.332284 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:23Z","lastTransitionTime":"2025-11-24T01:14:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.435260 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.435319 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.435336 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.435360 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.435377 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:23Z","lastTransitionTime":"2025-11-24T01:14:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.537360 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.537597 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.538871 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.538951 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.539018 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:23Z","lastTransitionTime":"2025-11-24T01:14:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.642946 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.643007 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.643027 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.643052 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.643069 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:23Z","lastTransitionTime":"2025-11-24T01:14:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.746156 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.746206 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.746222 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.746244 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.746261 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:23Z","lastTransitionTime":"2025-11-24T01:14:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.849529 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.849639 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.849663 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.849697 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.849721 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:23Z","lastTransitionTime":"2025-11-24T01:14:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.953009 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.953067 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.953083 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.953111 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.953129 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:23Z","lastTransitionTime":"2025-11-24T01:14:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.996704 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:14:23 crc kubenswrapper[4755]: E1124 01:14:23.996831 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:14:23 crc kubenswrapper[4755]: I1124 01:14:23.997143 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:14:23 crc kubenswrapper[4755]: E1124 01:14:23.997422 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.056413 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.056470 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.056489 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.056512 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.056529 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:24Z","lastTransitionTime":"2025-11-24T01:14:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.160047 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.160121 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.160141 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.160165 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.160181 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:24Z","lastTransitionTime":"2025-11-24T01:14:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.263260 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.263300 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.263311 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.263327 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.263339 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:24Z","lastTransitionTime":"2025-11-24T01:14:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.365926 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.365972 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.365981 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.365994 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.366004 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:24Z","lastTransitionTime":"2025-11-24T01:14:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.468792 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.469062 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.469180 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.469293 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.469393 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:24Z","lastTransitionTime":"2025-11-24T01:14:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.571537 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.571627 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.571638 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.571653 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.571664 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:24Z","lastTransitionTime":"2025-11-24T01:14:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.673978 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.674021 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.674033 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.674049 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.674060 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:24Z","lastTransitionTime":"2025-11-24T01:14:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.777149 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.777204 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.777216 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.777234 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.777246 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:24Z","lastTransitionTime":"2025-11-24T01:14:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.879691 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.879739 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.879755 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.879776 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.879791 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:24Z","lastTransitionTime":"2025-11-24T01:14:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.982808 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.982854 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.982863 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.982883 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.982892 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:24Z","lastTransitionTime":"2025-11-24T01:14:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.995526 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:14:24 crc kubenswrapper[4755]: I1124 01:14:24.995543 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:14:24 crc kubenswrapper[4755]: E1124 01:14:24.995734 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:14:24 crc kubenswrapper[4755]: E1124 01:14:24.995863 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:14:25 crc kubenswrapper[4755]: I1124 01:14:25.086010 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:25 crc kubenswrapper[4755]: I1124 01:14:25.086066 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:25 crc kubenswrapper[4755]: I1124 01:14:25.086079 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:25 crc kubenswrapper[4755]: I1124 01:14:25.086099 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:25 crc kubenswrapper[4755]: I1124 01:14:25.086115 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:25Z","lastTransitionTime":"2025-11-24T01:14:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:25 crc kubenswrapper[4755]: I1124 01:14:25.189134 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:25 crc kubenswrapper[4755]: I1124 01:14:25.189178 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:25 crc kubenswrapper[4755]: I1124 01:14:25.189190 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:25 crc kubenswrapper[4755]: I1124 01:14:25.189205 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:25 crc kubenswrapper[4755]: I1124 01:14:25.189216 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:25Z","lastTransitionTime":"2025-11-24T01:14:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:25 crc kubenswrapper[4755]: I1124 01:14:25.292650 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:25 crc kubenswrapper[4755]: I1124 01:14:25.292706 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:25 crc kubenswrapper[4755]: I1124 01:14:25.292718 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:25 crc kubenswrapper[4755]: I1124 01:14:25.292735 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:25 crc kubenswrapper[4755]: I1124 01:14:25.292749 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:25Z","lastTransitionTime":"2025-11-24T01:14:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:25 crc kubenswrapper[4755]: I1124 01:14:25.395996 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:25 crc kubenswrapper[4755]: I1124 01:14:25.396092 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:25 crc kubenswrapper[4755]: I1124 01:14:25.396113 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:25 crc kubenswrapper[4755]: I1124 01:14:25.396136 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:25 crc kubenswrapper[4755]: I1124 01:14:25.396152 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:25Z","lastTransitionTime":"2025-11-24T01:14:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:25 crc kubenswrapper[4755]: I1124 01:14:25.498542 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:25 crc kubenswrapper[4755]: I1124 01:14:25.498583 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:25 crc kubenswrapper[4755]: I1124 01:14:25.498593 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:25 crc kubenswrapper[4755]: I1124 01:14:25.498625 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:25 crc kubenswrapper[4755]: I1124 01:14:25.498636 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:25Z","lastTransitionTime":"2025-11-24T01:14:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:25 crc kubenswrapper[4755]: I1124 01:14:25.601339 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:25 crc kubenswrapper[4755]: I1124 01:14:25.601414 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:25 crc kubenswrapper[4755]: I1124 01:14:25.601438 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:25 crc kubenswrapper[4755]: I1124 01:14:25.601465 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:25 crc kubenswrapper[4755]: I1124 01:14:25.601486 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:25Z","lastTransitionTime":"2025-11-24T01:14:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:25 crc kubenswrapper[4755]: I1124 01:14:25.704466 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:25 crc kubenswrapper[4755]: I1124 01:14:25.704533 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:25 crc kubenswrapper[4755]: I1124 01:14:25.704556 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:25 crc kubenswrapper[4755]: I1124 01:14:25.704585 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:25 crc kubenswrapper[4755]: I1124 01:14:25.704637 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:25Z","lastTransitionTime":"2025-11-24T01:14:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:25 crc kubenswrapper[4755]: I1124 01:14:25.807778 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:25 crc kubenswrapper[4755]: I1124 01:14:25.807839 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:25 crc kubenswrapper[4755]: I1124 01:14:25.807856 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:25 crc kubenswrapper[4755]: I1124 01:14:25.807878 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:25 crc kubenswrapper[4755]: I1124 01:14:25.807899 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:25Z","lastTransitionTime":"2025-11-24T01:14:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:25 crc kubenswrapper[4755]: I1124 01:14:25.911222 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:25 crc kubenswrapper[4755]: I1124 01:14:25.911268 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:25 crc kubenswrapper[4755]: I1124 01:14:25.911280 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:25 crc kubenswrapper[4755]: I1124 01:14:25.911325 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:25 crc kubenswrapper[4755]: I1124 01:14:25.911337 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:25Z","lastTransitionTime":"2025-11-24T01:14:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:25 crc kubenswrapper[4755]: I1124 01:14:25.996685 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:14:25 crc kubenswrapper[4755]: I1124 01:14:25.996784 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:14:25 crc kubenswrapper[4755]: E1124 01:14:25.996886 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:14:25 crc kubenswrapper[4755]: E1124 01:14:25.997039 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.014059 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.014115 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.014135 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.014163 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.014185 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:26Z","lastTransitionTime":"2025-11-24T01:14:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.060459 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=68.060432039 podStartE2EDuration="1m8.060432039s" podCreationTimestamp="2025-11-24 01:13:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:14:26.060356116 +0000 UTC m=+90.746421727" watchObservedRunningTime="2025-11-24 01:14:26.060432039 +0000 UTC m=+90.746497580" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.081974 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=70.081109287 podStartE2EDuration="1m10.081109287s" podCreationTimestamp="2025-11-24 01:13:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:14:26.080123618 +0000 UTC m=+90.766189139" watchObservedRunningTime="2025-11-24 01:14:26.081109287 +0000 UTC m=+90.767174868" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.097182 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-zb6qq" podStartSLOduration=71.097163709 podStartE2EDuration="1m11.097163709s" podCreationTimestamp="2025-11-24 01:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:14:26.097118948 +0000 UTC m=+90.783184439" watchObservedRunningTime="2025-11-24 01:14:26.097163709 +0000 UTC m=+90.783229210" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.116642 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.116917 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.117001 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 
01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.117081 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.117152 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:26Z","lastTransitionTime":"2025-11-24T01:14:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.172497 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=71.172477735 podStartE2EDuration="1m11.172477735s" podCreationTimestamp="2025-11-24 01:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:14:26.15633562 +0000 UTC m=+90.842401141" watchObservedRunningTime="2025-11-24 01:14:26.172477735 +0000 UTC m=+90.858543236" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.206102 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-dt8lz" podStartSLOduration=71.206084033 podStartE2EDuration="1m11.206084033s" podCreationTimestamp="2025-11-24 01:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:14:26.19442642 +0000 UTC m=+90.880491931" watchObservedRunningTime="2025-11-24 01:14:26.206084033 +0000 UTC m=+90.892149534" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.219450 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.219692 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.219817 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.219912 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.220010 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:26Z","lastTransitionTime":"2025-11-24T01:14:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.242688 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podStartSLOduration=71.24266815 podStartE2EDuration="1m11.24266815s" podCreationTimestamp="2025-11-24 01:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:14:26.226394951 +0000 UTC m=+90.912460472" watchObservedRunningTime="2025-11-24 01:14:26.24266815 +0000 UTC m=+90.928733651" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.242860 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-8pm69" podStartSLOduration=71.242844695 podStartE2EDuration="1m11.242844695s" podCreationTimestamp="2025-11-24 01:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:14:26.242101673 +0000 UTC m=+90.928167174" watchObservedRunningTime="2025-11-24 01:14:26.242844695 +0000 UTC m=+90.928910196" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.257017 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-vzkz4" podStartSLOduration=71.256995851 podStartE2EDuration="1m11.256995851s" podCreationTimestamp="2025-11-24 01:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:14:26.256526448 +0000 UTC m=+90.942591949" watchObservedRunningTime="2025-11-24 01:14:26.256995851 +0000 UTC m=+90.943061362" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.268935 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7wvfc" podStartSLOduration=71.268915062 podStartE2EDuration="1m11.268915062s" podCreationTimestamp="2025-11-24 01:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:14:26.267901172 +0000 UTC m=+90.953966693" watchObservedRunningTime="2025-11-24 01:14:26.268915062 +0000 UTC m=+90.954980563" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.277578 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=20.277565417 podStartE2EDuration="20.277565417s" podCreationTimestamp="2025-11-24 01:14:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:14:26.277472524 +0000 UTC m=+90.963538025" watchObservedRunningTime="2025-11-24 01:14:26.277565417 +0000 UTC m=+90.963630908" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.291424 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=40.291406164 podStartE2EDuration="40.291406164s" podCreationTimestamp="2025-11-24 01:13:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:14:26.290790626 +0000 UTC m=+90.976856127" watchObservedRunningTime="2025-11-24 01:14:26.291406164 +0000 UTC m=+90.977471665" Nov 24 01:14:26 crc 
kubenswrapper[4755]: I1124 01:14:26.321799 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.322022 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.322117 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.322196 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.322265 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:26Z","lastTransitionTime":"2025-11-24T01:14:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.429920 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.429979 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.429992 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.430010 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.430026 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:26Z","lastTransitionTime":"2025-11-24T01:14:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.505247 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.505478 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.505548 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.505634 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.505709 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T01:14:26Z","lastTransitionTime":"2025-11-24T01:14:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.549342 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-p4mlj"] Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.550330 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-p4mlj" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.552686 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.553641 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.554434 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.555252 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.675827 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9c295480-4266-4d84-a522-358aaa6bb8d6-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-p4mlj\" (UID: \"9c295480-4266-4d84-a522-358aaa6bb8d6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-p4mlj" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.675884 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/9c295480-4266-4d84-a522-358aaa6bb8d6-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-p4mlj\" (UID: \"9c295480-4266-4d84-a522-358aaa6bb8d6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-p4mlj" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.675912 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/9c295480-4266-4d84-a522-358aaa6bb8d6-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-p4mlj\" (UID: \"9c295480-4266-4d84-a522-358aaa6bb8d6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-p4mlj" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.675955 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9c295480-4266-4d84-a522-358aaa6bb8d6-service-ca\") pod \"cluster-version-operator-5c965bbfc6-p4mlj\" (UID: \"9c295480-4266-4d84-a522-358aaa6bb8d6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-p4mlj" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.676051 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9c295480-4266-4d84-a522-358aaa6bb8d6-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-p4mlj\" (UID: \"9c295480-4266-4d84-a522-358aaa6bb8d6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-p4mlj" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.776966 4755 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9c295480-4266-4d84-a522-358aaa6bb8d6-service-ca\") pod \"cluster-version-operator-5c965bbfc6-p4mlj\" (UID: \"9c295480-4266-4d84-a522-358aaa6bb8d6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-p4mlj" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.777060 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9c295480-4266-4d84-a522-358aaa6bb8d6-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-p4mlj\" (UID: \"9c295480-4266-4d84-a522-358aaa6bb8d6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-p4mlj" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.777097 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9c295480-4266-4d84-a522-358aaa6bb8d6-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-p4mlj\" (UID: \"9c295480-4266-4d84-a522-358aaa6bb8d6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-p4mlj" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.777139 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/9c295480-4266-4d84-a522-358aaa6bb8d6-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-p4mlj\" (UID: \"9c295480-4266-4d84-a522-358aaa6bb8d6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-p4mlj" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.777184 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/9c295480-4266-4d84-a522-358aaa6bb8d6-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-p4mlj\" (UID: \"9c295480-4266-4d84-a522-358aaa6bb8d6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-p4mlj" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.777248 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/9c295480-4266-4d84-a522-358aaa6bb8d6-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-p4mlj\" (UID: \"9c295480-4266-4d84-a522-358aaa6bb8d6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-p4mlj" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.777374 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/9c295480-4266-4d84-a522-358aaa6bb8d6-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-p4mlj\" (UID: \"9c295480-4266-4d84-a522-358aaa6bb8d6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-p4mlj" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.778239 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9c295480-4266-4d84-a522-358aaa6bb8d6-service-ca\") pod \"cluster-version-operator-5c965bbfc6-p4mlj\" (UID: \"9c295480-4266-4d84-a522-358aaa6bb8d6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-p4mlj" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.786233 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/9c295480-4266-4d84-a522-358aaa6bb8d6-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-p4mlj\" (UID: \"9c295480-4266-4d84-a522-358aaa6bb8d6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-p4mlj" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.798516 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9c295480-4266-4d84-a522-358aaa6bb8d6-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-p4mlj\" (UID: \"9c295480-4266-4d84-a522-358aaa6bb8d6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-p4mlj" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.863143 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-p4mlj" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.995837 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:14:26 crc kubenswrapper[4755]: I1124 01:14:26.995895 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:14:26 crc kubenswrapper[4755]: E1124 01:14:26.995969 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:14:26 crc kubenswrapper[4755]: E1124 01:14:26.996115 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:14:27 crc kubenswrapper[4755]: I1124 01:14:27.486201 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-p4mlj" event={"ID":"9c295480-4266-4d84-a522-358aaa6bb8d6","Type":"ContainerStarted","Data":"ecf47464bfa9d9d7fcbc306f5f8fc344f33e892dcc53a305f4aea977ef71fb12"} Nov 24 01:14:27 crc kubenswrapper[4755]: I1124 01:14:27.486280 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-p4mlj" event={"ID":"9c295480-4266-4d84-a522-358aaa6bb8d6","Type":"ContainerStarted","Data":"05b1a267815a86dd41ec9c330d31af1f0412602b8cafaf488515cf0bb66dcddf"} Nov 24 01:14:27 crc kubenswrapper[4755]: I1124 01:14:27.508927 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-p4mlj" podStartSLOduration=72.508900211 podStartE2EDuration="1m12.508900211s" podCreationTimestamp="2025-11-24 01:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:14:27.507805208 +0000 UTC m=+92.193870719" watchObservedRunningTime="2025-11-24 01:14:27.508900211 +0000 UTC m=+92.194965712" Nov 24 01:14:27 crc kubenswrapper[4755]: I1124 01:14:27.995707 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:14:27 crc kubenswrapper[4755]: E1124 01:14:27.997183 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:14:27 crc kubenswrapper[4755]: I1124 01:14:27.995757 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:14:27 crc kubenswrapper[4755]: E1124 01:14:27.997287 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:14:28 crc kubenswrapper[4755]: I1124 01:14:28.995828 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:14:28 crc kubenswrapper[4755]: I1124 01:14:28.995864 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:14:28 crc kubenswrapper[4755]: E1124 01:14:28.995977 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:14:28 crc kubenswrapper[4755]: E1124 01:14:28.996082 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:14:29 crc kubenswrapper[4755]: I1124 01:14:29.996332 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:14:29 crc kubenswrapper[4755]: I1124 01:14:29.996406 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:14:29 crc kubenswrapper[4755]: E1124 01:14:29.996469 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:14:29 crc kubenswrapper[4755]: E1124 01:14:29.996639 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:14:30 crc kubenswrapper[4755]: I1124 01:14:30.997248 4755 scope.go:117] "RemoveContainer" containerID="722b96ad79d500044a30b065a6a5fa100a2388f7c296ae8858a3a83b881a1e0d" Nov 24 01:14:30 crc kubenswrapper[4755]: E1124 01:14:30.997388 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-4ngwk_openshift-ovn-kubernetes(b3b1d3cb-ffbd-4034-832d-6577ccf2f780)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" Nov 24 01:14:30 crc kubenswrapper[4755]: I1124 01:14:30.997536 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:14:30 crc kubenswrapper[4755]: I1124 01:14:30.997879 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:14:30 crc kubenswrapper[4755]: E1124 01:14:30.997925 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:14:30 crc kubenswrapper[4755]: E1124 01:14:30.998083 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:14:31 crc kubenswrapper[4755]: I1124 01:14:31.996589 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:14:31 crc kubenswrapper[4755]: I1124 01:14:31.996679 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:14:31 crc kubenswrapper[4755]: E1124 01:14:31.997220 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:14:31 crc kubenswrapper[4755]: E1124 01:14:31.997320 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:14:32 crc kubenswrapper[4755]: I1124 01:14:32.995948 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:14:32 crc kubenswrapper[4755]: I1124 01:14:32.995974 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:14:32 crc kubenswrapper[4755]: E1124 01:14:32.996059 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:14:32 crc kubenswrapper[4755]: E1124 01:14:32.996122 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:14:33 crc kubenswrapper[4755]: I1124 01:14:33.649705 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ccb86693-0b66-43ca-a2d1-e9594521d30f-metrics-certs\") pod \"network-metrics-daemon-9cl8m\" (UID: \"ccb86693-0b66-43ca-a2d1-e9594521d30f\") " pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:14:33 crc kubenswrapper[4755]: E1124 01:14:33.649843 4755 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 24 01:14:33 crc kubenswrapper[4755]: E1124 01:14:33.649893 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ccb86693-0b66-43ca-a2d1-e9594521d30f-metrics-certs podName:ccb86693-0b66-43ca-a2d1-e9594521d30f nodeName:}" failed. No retries permitted until 2025-11-24 01:15:37.649880154 +0000 UTC m=+162.335945655 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ccb86693-0b66-43ca-a2d1-e9594521d30f-metrics-certs") pod "network-metrics-daemon-9cl8m" (UID: "ccb86693-0b66-43ca-a2d1-e9594521d30f") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 24 01:14:33 crc kubenswrapper[4755]: I1124 01:14:33.996183 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:14:33 crc kubenswrapper[4755]: I1124 01:14:33.996199 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:14:33 crc kubenswrapper[4755]: E1124 01:14:33.996463 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:14:33 crc kubenswrapper[4755]: E1124 01:14:33.996543 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:14:34 crc kubenswrapper[4755]: I1124 01:14:34.995847 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:14:34 crc kubenswrapper[4755]: I1124 01:14:34.995847 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:14:34 crc kubenswrapper[4755]: E1124 01:14:34.996102 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:14:34 crc kubenswrapper[4755]: E1124 01:14:34.996204 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:14:35 crc kubenswrapper[4755]: I1124 01:14:35.995698 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:14:35 crc kubenswrapper[4755]: I1124 01:14:35.995767 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:14:35 crc kubenswrapper[4755]: E1124 01:14:35.998203 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:14:35 crc kubenswrapper[4755]: E1124 01:14:35.998308 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:14:36 crc kubenswrapper[4755]: I1124 01:14:36.995903 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:14:36 crc kubenswrapper[4755]: E1124 01:14:36.996040 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:14:36 crc kubenswrapper[4755]: I1124 01:14:36.995931 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:14:36 crc kubenswrapper[4755]: E1124 01:14:36.996119 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:14:37 crc kubenswrapper[4755]: I1124 01:14:37.995971 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:14:37 crc kubenswrapper[4755]: E1124 01:14:37.996094 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:14:37 crc kubenswrapper[4755]: I1124 01:14:37.996342 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:14:37 crc kubenswrapper[4755]: E1124 01:14:37.996395 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:14:38 crc kubenswrapper[4755]: I1124 01:14:38.995651 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:14:38 crc kubenswrapper[4755]: E1124 01:14:38.995765 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:14:38 crc kubenswrapper[4755]: I1124 01:14:38.995651 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:14:38 crc kubenswrapper[4755]: E1124 01:14:38.995954 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:14:39 crc kubenswrapper[4755]: I1124 01:14:39.996320 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:14:39 crc kubenswrapper[4755]: I1124 01:14:39.996364 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:14:39 crc kubenswrapper[4755]: E1124 01:14:39.996470 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:14:39 crc kubenswrapper[4755]: E1124 01:14:39.996725 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:14:40 crc kubenswrapper[4755]: I1124 01:14:40.996113 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:14:40 crc kubenswrapper[4755]: E1124 01:14:40.996539 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:14:40 crc kubenswrapper[4755]: I1124 01:14:40.996131 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:14:40 crc kubenswrapper[4755]: E1124 01:14:40.997283 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:14:41 crc kubenswrapper[4755]: I1124 01:14:41.996147 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:14:41 crc kubenswrapper[4755]: I1124 01:14:41.996181 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:14:41 crc kubenswrapper[4755]: E1124 01:14:41.996333 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:14:41 crc kubenswrapper[4755]: E1124 01:14:41.996494 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:14:42 crc kubenswrapper[4755]: I1124 01:14:42.995687 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:14:42 crc kubenswrapper[4755]: I1124 01:14:42.995697 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:14:42 crc kubenswrapper[4755]: E1124 01:14:42.995954 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:14:42 crc kubenswrapper[4755]: E1124 01:14:42.996031 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:14:43 crc kubenswrapper[4755]: I1124 01:14:43.996457 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:14:43 crc kubenswrapper[4755]: E1124 01:14:43.996646 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:14:43 crc kubenswrapper[4755]: I1124 01:14:43.996763 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:14:43 crc kubenswrapper[4755]: E1124 01:14:43.997148 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:14:43 crc kubenswrapper[4755]: I1124 01:14:43.997426 4755 scope.go:117] "RemoveContainer" containerID="722b96ad79d500044a30b065a6a5fa100a2388f7c296ae8858a3a83b881a1e0d" Nov 24 01:14:43 crc kubenswrapper[4755]: E1124 01:14:43.997637 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-4ngwk_openshift-ovn-kubernetes(b3b1d3cb-ffbd-4034-832d-6577ccf2f780)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" Nov 24 01:14:44 crc kubenswrapper[4755]: I1124 01:14:44.996081 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:14:44 crc kubenswrapper[4755]: I1124 01:14:44.996078 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:14:44 crc kubenswrapper[4755]: E1124 01:14:44.996442 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:14:44 crc kubenswrapper[4755]: E1124 01:14:44.996666 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:14:45 crc kubenswrapper[4755]: I1124 01:14:45.996039 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:14:45 crc kubenswrapper[4755]: I1124 01:14:45.996073 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:14:45 crc kubenswrapper[4755]: E1124 01:14:45.997193 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:14:45 crc kubenswrapper[4755]: E1124 01:14:45.997427 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:14:46 crc kubenswrapper[4755]: I1124 01:14:46.995585 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:14:46 crc kubenswrapper[4755]: I1124 01:14:46.995672 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:14:46 crc kubenswrapper[4755]: E1124 01:14:46.995840 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:14:46 crc kubenswrapper[4755]: E1124 01:14:46.995990 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:14:47 crc kubenswrapper[4755]: I1124 01:14:47.995638 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:14:47 crc kubenswrapper[4755]: I1124 01:14:47.995836 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:14:47 crc kubenswrapper[4755]: E1124 01:14:47.995866 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:14:47 crc kubenswrapper[4755]: E1124 01:14:47.996186 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:14:48 crc kubenswrapper[4755]: I1124 01:14:48.995937 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:14:48 crc kubenswrapper[4755]: I1124 01:14:48.996038 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:14:48 crc kubenswrapper[4755]: E1124 01:14:48.996214 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:14:48 crc kubenswrapper[4755]: E1124 01:14:48.996043 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:14:49 crc kubenswrapper[4755]: I1124 01:14:49.995635 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:14:49 crc kubenswrapper[4755]: E1124 01:14:49.996136 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:14:49 crc kubenswrapper[4755]: I1124 01:14:49.995704 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:14:49 crc kubenswrapper[4755]: E1124 01:14:49.997040 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:14:50 crc kubenswrapper[4755]: I1124 01:14:50.567181 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8pm69_19dbf7ff-f684-4c57-803a-83b39e0705a4/kube-multus/1.log" Nov 24 01:14:50 crc kubenswrapper[4755]: I1124 01:14:50.568262 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8pm69_19dbf7ff-f684-4c57-803a-83b39e0705a4/kube-multus/0.log" Nov 24 01:14:50 crc kubenswrapper[4755]: I1124 01:14:50.568329 4755 generic.go:334] "Generic (PLEG): container finished" podID="19dbf7ff-f684-4c57-803a-83b39e0705a4" containerID="edbc9d9876663d11869ff6269682427495c7d205d739e946c354595876274685" exitCode=1 Nov 24 01:14:50 crc kubenswrapper[4755]: I1124 01:14:50.568368 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-8pm69" event={"ID":"19dbf7ff-f684-4c57-803a-83b39e0705a4","Type":"ContainerDied","Data":"edbc9d9876663d11869ff6269682427495c7d205d739e946c354595876274685"} Nov 24 01:14:50 crc kubenswrapper[4755]: I1124 01:14:50.568407 4755 scope.go:117] "RemoveContainer" containerID="090d3d275453f489254405c1197750888cc4d15d4251c6ce9b8a1a873319d5a4" Nov 24 01:14:50 crc kubenswrapper[4755]: I1124 01:14:50.568968 4755 scope.go:117] "RemoveContainer" containerID="edbc9d9876663d11869ff6269682427495c7d205d739e946c354595876274685" Nov 24 01:14:50 crc kubenswrapper[4755]: E1124 01:14:50.569235 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-8pm69_openshift-multus(19dbf7ff-f684-4c57-803a-83b39e0705a4)\"" pod="openshift-multus/multus-8pm69" podUID="19dbf7ff-f684-4c57-803a-83b39e0705a4" Nov 24 01:14:50 crc kubenswrapper[4755]: I1124 01:14:50.996299 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:14:50 crc kubenswrapper[4755]: I1124 01:14:50.996356 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:14:50 crc kubenswrapper[4755]: E1124 01:14:50.997561 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:14:50 crc kubenswrapper[4755]: E1124 01:14:50.997588 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:14:51 crc kubenswrapper[4755]: I1124 01:14:51.574741 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8pm69_19dbf7ff-f684-4c57-803a-83b39e0705a4/kube-multus/1.log" Nov 24 01:14:51 crc kubenswrapper[4755]: I1124 01:14:51.995910 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:14:51 crc kubenswrapper[4755]: I1124 01:14:51.995941 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:14:51 crc kubenswrapper[4755]: E1124 01:14:51.996027 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:14:51 crc kubenswrapper[4755]: E1124 01:14:51.996124 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:14:52 crc kubenswrapper[4755]: I1124 01:14:52.996590 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:14:52 crc kubenswrapper[4755]: I1124 01:14:52.996636 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:14:52 crc kubenswrapper[4755]: E1124 01:14:52.996822 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:14:52 crc kubenswrapper[4755]: E1124 01:14:52.997049 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:14:53 crc kubenswrapper[4755]: I1124 01:14:53.995650 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:14:53 crc kubenswrapper[4755]: I1124 01:14:53.995652 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:14:53 crc kubenswrapper[4755]: E1124 01:14:53.995829 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:14:53 crc kubenswrapper[4755]: E1124 01:14:53.996048 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:14:54 crc kubenswrapper[4755]: I1124 01:14:54.996547 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:14:54 crc kubenswrapper[4755]: E1124 01:14:54.996674 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:14:54 crc kubenswrapper[4755]: I1124 01:14:54.996768 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:14:54 crc kubenswrapper[4755]: E1124 01:14:54.996935 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:14:55 crc kubenswrapper[4755]: I1124 01:14:55.995935 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:14:55 crc kubenswrapper[4755]: I1124 01:14:55.995983 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:14:55 crc kubenswrapper[4755]: E1124 01:14:55.997062 4755 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Nov 24 01:14:55 crc kubenswrapper[4755]: E1124 01:14:55.997934 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:14:55 crc kubenswrapper[4755]: E1124 01:14:55.998117 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:14:56 crc kubenswrapper[4755]: E1124 01:14:56.121818 4755 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 24 01:14:56 crc kubenswrapper[4755]: I1124 01:14:56.996448 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:14:56 crc kubenswrapper[4755]: I1124 01:14:56.996448 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:14:56 crc kubenswrapper[4755]: E1124 01:14:56.996972 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:14:56 crc kubenswrapper[4755]: E1124 01:14:56.997129 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:14:57 crc kubenswrapper[4755]: I1124 01:14:57.995985 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:14:57 crc kubenswrapper[4755]: E1124 01:14:57.996201 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:14:57 crc kubenswrapper[4755]: I1124 01:14:57.996768 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:14:57 crc kubenswrapper[4755]: E1124 01:14:57.996982 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:14:57 crc kubenswrapper[4755]: I1124 01:14:57.997315 4755 scope.go:117] "RemoveContainer" containerID="722b96ad79d500044a30b065a6a5fa100a2388f7c296ae8858a3a83b881a1e0d" Nov 24 01:14:58 crc kubenswrapper[4755]: I1124 01:14:58.602213 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4ngwk_b3b1d3cb-ffbd-4034-832d-6577ccf2f780/ovnkube-controller/3.log" Nov 24 01:14:58 crc kubenswrapper[4755]: I1124 01:14:58.604626 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" event={"ID":"b3b1d3cb-ffbd-4034-832d-6577ccf2f780","Type":"ContainerStarted","Data":"cca5bd213ea6f69dd96cf34cf1e3fdb22b20ba529527b33302f824ac9ab8d6b4"} Nov 24 01:14:58 crc kubenswrapper[4755]: I1124 01:14:58.605260 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:14:58 crc kubenswrapper[4755]: I1124 01:14:58.630037 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" podStartSLOduration=103.630019842 podStartE2EDuration="1m43.630019842s" podCreationTimestamp="2025-11-24 01:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:14:58.629072124 +0000 UTC m=+123.315137625" watchObservedRunningTime="2025-11-24 01:14:58.630019842 +0000 UTC m=+123.316085343" Nov 24 01:14:58 crc kubenswrapper[4755]: I1124 01:14:58.844591 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-9cl8m"] Nov 24 01:14:58 crc kubenswrapper[4755]: I1124 01:14:58.844737 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:14:58 crc kubenswrapper[4755]: E1124 01:14:58.844860 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:14:58 crc kubenswrapper[4755]: I1124 01:14:58.995900 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:14:58 crc kubenswrapper[4755]: E1124 01:14:58.996016 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:14:59 crc kubenswrapper[4755]: I1124 01:14:59.996530 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:14:59 crc kubenswrapper[4755]: I1124 01:14:59.996656 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:14:59 crc kubenswrapper[4755]: E1124 01:14:59.997117 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:14:59 crc kubenswrapper[4755]: E1124 01:14:59.997331 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:15:00 crc kubenswrapper[4755]: I1124 01:15:00.995850 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:15:00 crc kubenswrapper[4755]: I1124 01:15:00.995879 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:15:00 crc kubenswrapper[4755]: E1124 01:15:00.995977 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:15:00 crc kubenswrapper[4755]: E1124 01:15:00.996059 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:15:01 crc kubenswrapper[4755]: E1124 01:15:01.124219 4755 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 24 01:15:01 crc kubenswrapper[4755]: I1124 01:15:01.996650 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:15:01 crc kubenswrapper[4755]: E1124 01:15:01.996886 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:15:01 crc kubenswrapper[4755]: I1124 01:15:01.996948 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:15:01 crc kubenswrapper[4755]: E1124 01:15:01.997132 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:15:02 crc kubenswrapper[4755]: I1124 01:15:02.996534 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:15:02 crc kubenswrapper[4755]: E1124 01:15:02.996766 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:15:02 crc kubenswrapper[4755]: I1124 01:15:02.996806 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:15:02 crc kubenswrapper[4755]: E1124 01:15:02.996962 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:15:03 crc kubenswrapper[4755]: I1124 01:15:03.996157 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:15:03 crc kubenswrapper[4755]: E1124 01:15:03.996341 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:15:03 crc kubenswrapper[4755]: I1124 01:15:03.996358 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:15:03 crc kubenswrapper[4755]: E1124 01:15:03.996686 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:15:03 crc kubenswrapper[4755]: I1124 01:15:03.997040 4755 scope.go:117] "RemoveContainer" containerID="edbc9d9876663d11869ff6269682427495c7d205d739e946c354595876274685" Nov 24 01:15:04 crc kubenswrapper[4755]: I1124 01:15:04.625264 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8pm69_19dbf7ff-f684-4c57-803a-83b39e0705a4/kube-multus/1.log" Nov 24 01:15:04 crc kubenswrapper[4755]: I1124 01:15:04.625331 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-8pm69" event={"ID":"19dbf7ff-f684-4c57-803a-83b39e0705a4","Type":"ContainerStarted","Data":"7d63755ca911a04529ff56ebf1481bbd429aeb3305588a602d374328fb47890f"} Nov 24 01:15:04 crc kubenswrapper[4755]: I1124 01:15:04.996523 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:15:04 crc kubenswrapper[4755]: E1124 01:15:04.997060 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9cl8m" podUID="ccb86693-0b66-43ca-a2d1-e9594521d30f" Nov 24 01:15:04 crc kubenswrapper[4755]: I1124 01:15:04.996585 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:15:04 crc kubenswrapper[4755]: E1124 01:15:04.997213 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 01:15:05 crc kubenswrapper[4755]: I1124 01:15:05.995806 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:15:05 crc kubenswrapper[4755]: E1124 01:15:05.997712 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 01:15:05 crc kubenswrapper[4755]: I1124 01:15:05.997955 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:15:05 crc kubenswrapper[4755]: E1124 01:15:05.998201 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 01:15:06 crc kubenswrapper[4755]: I1124 01:15:06.996009 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:15:06 crc kubenswrapper[4755]: I1124 01:15:06.996023 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:15:06 crc kubenswrapper[4755]: I1124 01:15:06.998844 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:06.999827 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.000096 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.000234 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.308858 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.348019 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-2qz86"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.348301 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-pnvtd"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.348497 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-4gmv8"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.348741 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4gmv8" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.349373 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-2qz86" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.349734 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.353786 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-vnvq4"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.354090 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-vmwlz"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.354479 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-9f7kc"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.354790 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-9f7kc" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.355295 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-vnvq4" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.355498 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-vmwlz" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.358344 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7bwdn"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.358780 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-cnqln"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.359219 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cnqln" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.359685 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7bwdn" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.363152 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-2b47p"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.364108 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-mlls8"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.364439 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-mlls8" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.366717 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-2b47p" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.372326 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.372540 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.383620 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.386688 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.387083 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.387378 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.387459 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.387533 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.387626 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.387701 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.387763 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.387828 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.387907 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.387972 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.388036 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.388190 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.388267 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.388502 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.389170 4755 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-etcd-operator"/"etcd-operator-config" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.389325 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.389436 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.389557 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.389705 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.389822 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.389955 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.390083 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.390456 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.390573 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.390804 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.390904 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.408272 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.408766 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-vxz4v"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.409136 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.409341 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.409352 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.409812 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.409870 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.410767 4755 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.412143 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.412243 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.413341 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.422382 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.431243 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.432684 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-tvwrx"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.432708 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.432870 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.433016 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.433134 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.433184 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-tvwrx" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.433252 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.433374 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.433488 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.433586 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6zk9r"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.433646 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.433735 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.433844 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.433950 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.433979 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6zk9r" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.434362 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.434544 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.434651 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.434746 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.434822 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.434882 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.435043 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.436216 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b7wq9"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.436700 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b7wq9" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.437002 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.437143 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.437294 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.437417 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.437570 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.437706 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.437567 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.438150 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.438564 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.438722 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-hqh6f"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.439629 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hqh6f" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.440051 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-p4sx7"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.440541 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-p4sx7" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.440852 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-snkpv"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.441353 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-snkpv" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.446341 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-cslz2"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.447194 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cslz2" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.448053 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.448223 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.453249 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.455560 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-xtpxd"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.456254 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-xtpxd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.459009 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6qvk7"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.459648 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6qvk7" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.461514 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2v27w"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.462067 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2v27w" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.462426 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/0e541e12-dcdd-4753-9607-282590cbd898-etcd-service-ca\") pod \"etcd-operator-b45778765-2b47p\" (UID: \"0e541e12-dcdd-4753-9607-282590cbd898\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2b47p" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.462474 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fxflz\" (UniqueName: \"kubernetes.io/projected/2eb0eb3e-5d9d-421e-b33e-253f66f88ea9-kube-api-access-fxflz\") pod \"openshift-controller-manager-operator-756b6f6bc6-7bwdn\" (UID: \"2eb0eb3e-5d9d-421e-b33e-253f66f88ea9\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7bwdn" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.462568 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/85fb9244-9754-4924-b53c-51ccbf6a5220-serving-cert\") pod \"route-controller-manager-6576b87f9c-4gmv8\" (UID: \"85fb9244-9754-4924-b53c-51ccbf6a5220\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4gmv8" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.462595 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1f1350c1-7f2f-4c43-9029-a33ab1eb24a8-serving-cert\") pod \"controller-manager-879f6c89f-2qz86\" (UID: \"1f1350c1-7f2f-4c43-9029-a33ab1eb24a8\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2qz86" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.462635 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2eb0eb3e-5d9d-421e-b33e-253f66f88ea9-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-7bwdn\" (UID: \"2eb0eb3e-5d9d-421e-b33e-253f66f88ea9\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7bwdn" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.462662 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/65c5f11b-931e-4dc2-8c3e-c7180b94ec08-trusted-ca-bundle\") pod \"console-f9d7485db-mlls8\" (UID: \"65c5f11b-931e-4dc2-8c3e-c7180b94ec08\") " pod="openshift-console/console-f9d7485db-mlls8" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.462686 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/65c5f11b-931e-4dc2-8c3e-c7180b94ec08-oauth-serving-cert\") pod \"console-f9d7485db-mlls8\" (UID: \"65c5f11b-931e-4dc2-8c3e-c7180b94ec08\") " pod="openshift-console/console-f9d7485db-mlls8" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.462707 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/06b2b494-6e14-4fd2-8fc8-5b491090adaa-audit\") pod \"apiserver-76f77b778f-vmwlz\" (UID: 
\"06b2b494-6e14-4fd2-8fc8-5b491090adaa\") " pod="openshift-apiserver/apiserver-76f77b778f-vmwlz" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.462731 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-audit-dir\") pod \"oauth-openshift-558db77b4-pnvtd\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.462753 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7zt9b\" (UniqueName: \"kubernetes.io/projected/d55c6dc8-108f-4f8c-a6cf-4c7e1363c9e4-kube-api-access-7zt9b\") pod \"downloads-7954f5f757-vnvq4\" (UID: \"d55c6dc8-108f-4f8c-a6cf-4c7e1363c9e4\") " pod="openshift-console/downloads-7954f5f757-vnvq4" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.462774 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/65c5f11b-931e-4dc2-8c3e-c7180b94ec08-service-ca\") pod \"console-f9d7485db-mlls8\" (UID: \"65c5f11b-931e-4dc2-8c3e-c7180b94ec08\") " pod="openshift-console/console-f9d7485db-mlls8" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.462793 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0e541e12-dcdd-4753-9607-282590cbd898-serving-cert\") pod \"etcd-operator-b45778765-2b47p\" (UID: \"0e541e12-dcdd-4753-9607-282590cbd898\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2b47p" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.462824 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/fceb51f9-deec-4840-86d0-a67228819bef-images\") pod \"machine-api-operator-5694c8668f-9f7kc\" (UID: \"fceb51f9-deec-4840-86d0-a67228819bef\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9f7kc" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.462846 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/85fb9244-9754-4924-b53c-51ccbf6a5220-client-ca\") pod \"route-controller-manager-6576b87f9c-4gmv8\" (UID: \"85fb9244-9754-4924-b53c-51ccbf6a5220\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4gmv8" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.462865 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/06b2b494-6e14-4fd2-8fc8-5b491090adaa-node-pullsecrets\") pod \"apiserver-76f77b778f-vmwlz\" (UID: \"06b2b494-6e14-4fd2-8fc8-5b491090adaa\") " pod="openshift-apiserver/apiserver-76f77b778f-vmwlz" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.462884 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fceb51f9-deec-4840-86d0-a67228819bef-config\") pod \"machine-api-operator-5694c8668f-9f7kc\" (UID: \"fceb51f9-deec-4840-86d0-a67228819bef\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9f7kc" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.462906 4755 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1d3d34ab-42fa-4eaf-98fc-247a14f1231e-auth-proxy-config\") pod \"machine-approver-56656f9798-cnqln\" (UID: \"1d3d34ab-42fa-4eaf-98fc-247a14f1231e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cnqln" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.462927 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qbsp6\" (UniqueName: \"kubernetes.io/projected/1f1350c1-7f2f-4c43-9029-a33ab1eb24a8-kube-api-access-qbsp6\") pod \"controller-manager-879f6c89f-2qz86\" (UID: \"1f1350c1-7f2f-4c43-9029-a33ab1eb24a8\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2qz86" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.462950 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2eb0eb3e-5d9d-421e-b33e-253f66f88ea9-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-7bwdn\" (UID: \"2eb0eb3e-5d9d-421e-b33e-253f66f88ea9\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7bwdn" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.462976 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-pnvtd\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.462999 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-pnvtd\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.463024 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-pnvtd\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.463063 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rnqzq\" (UniqueName: \"kubernetes.io/projected/0e541e12-dcdd-4753-9607-282590cbd898-kube-api-access-rnqzq\") pod \"etcd-operator-b45778765-2b47p\" (UID: \"0e541e12-dcdd-4753-9607-282590cbd898\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2b47p" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.463087 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1f1350c1-7f2f-4c43-9029-a33ab1eb24a8-client-ca\") pod \"controller-manager-879f6c89f-2qz86\" (UID: \"1f1350c1-7f2f-4c43-9029-a33ab1eb24a8\") " 
pod="openshift-controller-manager/controller-manager-879f6c89f-2qz86" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.463113 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-pnvtd\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.463140 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-pnvtd\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.463166 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-pnvtd\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.463188 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-pnvtd\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.463209 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/06b2b494-6e14-4fd2-8fc8-5b491090adaa-etcd-client\") pod \"apiserver-76f77b778f-vmwlz\" (UID: \"06b2b494-6e14-4fd2-8fc8-5b491090adaa\") " pod="openshift-apiserver/apiserver-76f77b778f-vmwlz" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.463235 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/06b2b494-6e14-4fd2-8fc8-5b491090adaa-encryption-config\") pod \"apiserver-76f77b778f-vmwlz\" (UID: \"06b2b494-6e14-4fd2-8fc8-5b491090adaa\") " pod="openshift-apiserver/apiserver-76f77b778f-vmwlz" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.463255 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/06b2b494-6e14-4fd2-8fc8-5b491090adaa-etcd-serving-ca\") pod \"apiserver-76f77b778f-vmwlz\" (UID: \"06b2b494-6e14-4fd2-8fc8-5b491090adaa\") " pod="openshift-apiserver/apiserver-76f77b778f-vmwlz" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.463277 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m22w7\" (UniqueName: \"kubernetes.io/projected/65c5f11b-931e-4dc2-8c3e-c7180b94ec08-kube-api-access-m22w7\") pod \"console-f9d7485db-mlls8\" (UID: \"65c5f11b-931e-4dc2-8c3e-c7180b94ec08\") " 
pod="openshift-console/console-f9d7485db-mlls8" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.463298 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-audit-policies\") pod \"oauth-openshift-558db77b4-pnvtd\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.463320 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/06b2b494-6e14-4fd2-8fc8-5b491090adaa-config\") pod \"apiserver-76f77b778f-vmwlz\" (UID: \"06b2b494-6e14-4fd2-8fc8-5b491090adaa\") " pod="openshift-apiserver/apiserver-76f77b778f-vmwlz" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.463340 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-pnvtd\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.463360 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/06b2b494-6e14-4fd2-8fc8-5b491090adaa-serving-cert\") pod \"apiserver-76f77b778f-vmwlz\" (UID: \"06b2b494-6e14-4fd2-8fc8-5b491090adaa\") " pod="openshift-apiserver/apiserver-76f77b778f-vmwlz" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.463383 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-pnvtd\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.463405 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-474ng\" (UniqueName: \"kubernetes.io/projected/1d3d34ab-42fa-4eaf-98fc-247a14f1231e-kube-api-access-474ng\") pod \"machine-approver-56656f9798-cnqln\" (UID: \"1d3d34ab-42fa-4eaf-98fc-247a14f1231e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cnqln" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.463427 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-pnvtd\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.463447 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5wzj\" (UniqueName: \"kubernetes.io/projected/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-kube-api-access-r5wzj\") pod \"oauth-openshift-558db77b4-pnvtd\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.463468 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-srp6c\" (UniqueName: \"kubernetes.io/projected/06b2b494-6e14-4fd2-8fc8-5b491090adaa-kube-api-access-srp6c\") pod \"apiserver-76f77b778f-vmwlz\" (UID: \"06b2b494-6e14-4fd2-8fc8-5b491090adaa\") " pod="openshift-apiserver/apiserver-76f77b778f-vmwlz" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.463489 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e541e12-dcdd-4753-9607-282590cbd898-config\") pod \"etcd-operator-b45778765-2b47p\" (UID: \"0e541e12-dcdd-4753-9607-282590cbd898\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2b47p" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.463512 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-klggk\" (UniqueName: \"kubernetes.io/projected/fceb51f9-deec-4840-86d0-a67228819bef-kube-api-access-klggk\") pod \"machine-api-operator-5694c8668f-9f7kc\" (UID: \"fceb51f9-deec-4840-86d0-a67228819bef\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9f7kc" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.463533 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/1d3d34ab-42fa-4eaf-98fc-247a14f1231e-machine-approver-tls\") pod \"machine-approver-56656f9798-cnqln\" (UID: \"1d3d34ab-42fa-4eaf-98fc-247a14f1231e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cnqln" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.463554 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/06b2b494-6e14-4fd2-8fc8-5b491090adaa-trusted-ca-bundle\") pod \"apiserver-76f77b778f-vmwlz\" (UID: \"06b2b494-6e14-4fd2-8fc8-5b491090adaa\") " pod="openshift-apiserver/apiserver-76f77b778f-vmwlz" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.463581 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/65c5f11b-931e-4dc2-8c3e-c7180b94ec08-console-config\") pod \"console-f9d7485db-mlls8\" (UID: \"65c5f11b-931e-4dc2-8c3e-c7180b94ec08\") " pod="openshift-console/console-f9d7485db-mlls8" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.463642 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/fceb51f9-deec-4840-86d0-a67228819bef-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-9f7kc\" (UID: \"fceb51f9-deec-4840-86d0-a67228819bef\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9f7kc" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.463666 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f1350c1-7f2f-4c43-9029-a33ab1eb24a8-config\") pod \"controller-manager-879f6c89f-2qz86\" (UID: \"1f1350c1-7f2f-4c43-9029-a33ab1eb24a8\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2qz86" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 
01:15:07.463689 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1f1350c1-7f2f-4c43-9029-a33ab1eb24a8-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-2qz86\" (UID: \"1f1350c1-7f2f-4c43-9029-a33ab1eb24a8\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2qz86" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.463712 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/0e541e12-dcdd-4753-9607-282590cbd898-etcd-ca\") pod \"etcd-operator-b45778765-2b47p\" (UID: \"0e541e12-dcdd-4753-9607-282590cbd898\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2b47p" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.463751 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-pnvtd\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.463774 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/65c5f11b-931e-4dc2-8c3e-c7180b94ec08-console-oauth-config\") pod \"console-f9d7485db-mlls8\" (UID: \"65c5f11b-931e-4dc2-8c3e-c7180b94ec08\") " pod="openshift-console/console-f9d7485db-mlls8" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.463796 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/06b2b494-6e14-4fd2-8fc8-5b491090adaa-audit-dir\") pod \"apiserver-76f77b778f-vmwlz\" (UID: \"06b2b494-6e14-4fd2-8fc8-5b491090adaa\") " pod="openshift-apiserver/apiserver-76f77b778f-vmwlz" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.463816 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1d3d34ab-42fa-4eaf-98fc-247a14f1231e-config\") pod \"machine-approver-56656f9798-cnqln\" (UID: \"1d3d34ab-42fa-4eaf-98fc-247a14f1231e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cnqln" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.463839 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/65c5f11b-931e-4dc2-8c3e-c7180b94ec08-console-serving-cert\") pod \"console-f9d7485db-mlls8\" (UID: \"65c5f11b-931e-4dc2-8c3e-c7180b94ec08\") " pod="openshift-console/console-f9d7485db-mlls8" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.463859 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85fb9244-9754-4924-b53c-51ccbf6a5220-config\") pod \"route-controller-manager-6576b87f9c-4gmv8\" (UID: \"85fb9244-9754-4924-b53c-51ccbf6a5220\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4gmv8" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.463878 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" 
(UniqueName: \"kubernetes.io/secret/0e541e12-dcdd-4753-9607-282590cbd898-etcd-client\") pod \"etcd-operator-b45778765-2b47p\" (UID: \"0e541e12-dcdd-4753-9607-282590cbd898\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2b47p" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.463908 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kbjcr\" (UniqueName: \"kubernetes.io/projected/85fb9244-9754-4924-b53c-51ccbf6a5220-kube-api-access-kbjcr\") pod \"route-controller-manager-6576b87f9c-4gmv8\" (UID: \"85fb9244-9754-4924-b53c-51ccbf6a5220\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4gmv8" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.463929 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/06b2b494-6e14-4fd2-8fc8-5b491090adaa-image-import-ca\") pod \"apiserver-76f77b778f-vmwlz\" (UID: \"06b2b494-6e14-4fd2-8fc8-5b491090adaa\") " pod="openshift-apiserver/apiserver-76f77b778f-vmwlz" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.464910 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.465377 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.465403 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.468853 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.469235 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.469674 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-trrhl"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.470324 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s445k"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.470755 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s445k" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.471067 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-trrhl" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.473086 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.473467 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.475476 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.475665 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.475690 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-zjcfn"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.477224 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.477418 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.481467 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.481742 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.482993 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.485071 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-xtvfv"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.485232 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.485947 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.486166 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.487478 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-xtvfv" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.488104 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zjcfn" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.489091 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.489878 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.490253 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.490271 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.490258 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.490389 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.497085 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.507684 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-xr447"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.521446 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.522752 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.523154 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.523322 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.523478 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.523651 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.523721 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-xr447" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.523847 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.524086 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.525170 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.532698 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-gdtf7"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.533429 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-gdtf7" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.533508 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.539915 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.540167 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.544548 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-qtkb8"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.545398 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-rxc25"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.545555 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qtkb8" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.547127 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4hrgd"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.547580 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rxc25" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.547718 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-x78dk"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.548042 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4hrgd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.548738 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-x78dk" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.548898 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw2tr"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.549728 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw2tr" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.550102 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7tgn4"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.550643 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7tgn4" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.550987 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.553668 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-h4hv2"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.554193 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-nfhp7"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.554641 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-nfhp7" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.554912 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-h4hv2" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.555101 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-rn5m4"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.555492 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-rn5m4" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.556342 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29399115-rt4z5"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.557069 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29399115-rt4z5" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.558804 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-57p55"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.559717 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-pnvtd"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.559848 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-57p55" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.562676 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-4gmv8"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.563979 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-2qz86"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.564445 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-pnvtd\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.564500 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/06b2b494-6e14-4fd2-8fc8-5b491090adaa-serving-cert\") pod \"apiserver-76f77b778f-vmwlz\" (UID: \"06b2b494-6e14-4fd2-8fc8-5b491090adaa\") " pod="openshift-apiserver/apiserver-76f77b778f-vmwlz" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.564535 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/fa143983-92e8-480e-9bb3-928892077000-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-6qvk7\" (UID: \"fa143983-92e8-480e-9bb3-928892077000\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6qvk7" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.564563 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-pnvtd\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.564590 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-474ng\" (UniqueName: \"kubernetes.io/projected/1d3d34ab-42fa-4eaf-98fc-247a14f1231e-kube-api-access-474ng\") pod \"machine-approver-56656f9798-cnqln\" (UID: \"1d3d34ab-42fa-4eaf-98fc-247a14f1231e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cnqln" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.564640 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d274950d-dc6c-424c-b87e-a7b6e88b6092-config\") pod \"service-ca-operator-777779d784-xr447\" (UID: \"d274950d-dc6c-424c-b87e-a7b6e88b6092\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xr447" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.564665 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-srp6c\" (UniqueName: \"kubernetes.io/projected/06b2b494-6e14-4fd2-8fc8-5b491090adaa-kube-api-access-srp6c\") pod \"apiserver-76f77b778f-vmwlz\" (UID: \"06b2b494-6e14-4fd2-8fc8-5b491090adaa\") " pod="openshift-apiserver/apiserver-76f77b778f-vmwlz" 
Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.564684 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e541e12-dcdd-4753-9607-282590cbd898-config\") pod \"etcd-operator-b45778765-2b47p\" (UID: \"0e541e12-dcdd-4753-9607-282590cbd898\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2b47p" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.564708 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-pnvtd\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.564729 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5wzj\" (UniqueName: \"kubernetes.io/projected/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-kube-api-access-r5wzj\") pod \"oauth-openshift-558db77b4-pnvtd\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.564748 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-klggk\" (UniqueName: \"kubernetes.io/projected/fceb51f9-deec-4840-86d0-a67228819bef-kube-api-access-klggk\") pod \"machine-api-operator-5694c8668f-9f7kc\" (UID: \"fceb51f9-deec-4840-86d0-a67228819bef\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9f7kc" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.564768 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/1d3d34ab-42fa-4eaf-98fc-247a14f1231e-machine-approver-tls\") pod \"machine-approver-56656f9798-cnqln\" (UID: \"1d3d34ab-42fa-4eaf-98fc-247a14f1231e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cnqln" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.564794 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mxp77\" (UniqueName: \"kubernetes.io/projected/77b33d59-46dc-45ae-9ded-cbd3918f79fd-kube-api-access-mxp77\") pod \"machine-config-operator-74547568cd-zjcfn\" (UID: \"77b33d59-46dc-45ae-9ded-cbd3918f79fd\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zjcfn" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.564826 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/65c5f11b-931e-4dc2-8c3e-c7180b94ec08-console-config\") pod \"console-f9d7485db-mlls8\" (UID: \"65c5f11b-931e-4dc2-8c3e-c7180b94ec08\") " pod="openshift-console/console-f9d7485db-mlls8" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.564850 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/06b2b494-6e14-4fd2-8fc8-5b491090adaa-trusted-ca-bundle\") pod \"apiserver-76f77b778f-vmwlz\" (UID: \"06b2b494-6e14-4fd2-8fc8-5b491090adaa\") " pod="openshift-apiserver/apiserver-76f77b778f-vmwlz" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.564872 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" 
(UniqueName: \"kubernetes.io/secret/3de6425b-7697-42d0-8b32-0a6e91078e9f-serving-cert\") pod \"apiserver-7bbb656c7d-cslz2\" (UID: \"3de6425b-7697-42d0-8b32-0a6e91078e9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cslz2" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.564905 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/fceb51f9-deec-4840-86d0-a67228819bef-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-9f7kc\" (UID: \"fceb51f9-deec-4840-86d0-a67228819bef\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9f7kc" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.564926 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/3de6425b-7697-42d0-8b32-0a6e91078e9f-encryption-config\") pod \"apiserver-7bbb656c7d-cslz2\" (UID: \"3de6425b-7697-42d0-8b32-0a6e91078e9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cslz2" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.564956 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f9511d1a-d4c3-48e5-add0-a09b3543f768-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-s445k\" (UID: \"f9511d1a-d4c3-48e5-add0-a09b3543f768\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s445k" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.564977 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/0e541e12-dcdd-4753-9607-282590cbd898-etcd-ca\") pod \"etcd-operator-b45778765-2b47p\" (UID: \"0e541e12-dcdd-4753-9607-282590cbd898\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2b47p" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.564998 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/0a130010-6476-46e9-be91-f9040275be7b-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-tvwrx\" (UID: \"0a130010-6476-46e9-be91-f9040275be7b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-tvwrx" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.565032 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f1350c1-7f2f-4c43-9029-a33ab1eb24a8-config\") pod \"controller-manager-879f6c89f-2qz86\" (UID: \"1f1350c1-7f2f-4c43-9029-a33ab1eb24a8\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2qz86" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.565051 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1f1350c1-7f2f-4c43-9029-a33ab1eb24a8-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-2qz86\" (UID: \"1f1350c1-7f2f-4c43-9029-a33ab1eb24a8\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2qz86" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.565073 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: 
\"kubernetes.io/configmap/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-pnvtd\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.565091 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/65c5f11b-931e-4dc2-8c3e-c7180b94ec08-console-oauth-config\") pod \"console-f9d7485db-mlls8\" (UID: \"65c5f11b-931e-4dc2-8c3e-c7180b94ec08\") " pod="openshift-console/console-f9d7485db-mlls8" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.565112 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c55b3456-832f-47a0-90fc-383747422868-serving-cert\") pod \"authentication-operator-69f744f599-xtpxd\" (UID: \"c55b3456-832f-47a0-90fc-383747422868\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xtpxd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.565129 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/3de6425b-7697-42d0-8b32-0a6e91078e9f-audit-policies\") pod \"apiserver-7bbb656c7d-cslz2\" (UID: \"3de6425b-7697-42d0-8b32-0a6e91078e9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cslz2" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.565147 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/65c5f11b-931e-4dc2-8c3e-c7180b94ec08-console-serving-cert\") pod \"console-f9d7485db-mlls8\" (UID: \"65c5f11b-931e-4dc2-8c3e-c7180b94ec08\") " pod="openshift-console/console-f9d7485db-mlls8" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.565165 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/06b2b494-6e14-4fd2-8fc8-5b491090adaa-audit-dir\") pod \"apiserver-76f77b778f-vmwlz\" (UID: \"06b2b494-6e14-4fd2-8fc8-5b491090adaa\") " pod="openshift-apiserver/apiserver-76f77b778f-vmwlz" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.565185 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1d3d34ab-42fa-4eaf-98fc-247a14f1231e-config\") pod \"machine-approver-56656f9798-cnqln\" (UID: \"1d3d34ab-42fa-4eaf-98fc-247a14f1231e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cnqln" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.565204 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85fb9244-9754-4924-b53c-51ccbf6a5220-config\") pod \"route-controller-manager-6576b87f9c-4gmv8\" (UID: \"85fb9244-9754-4924-b53c-51ccbf6a5220\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4gmv8" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.565236 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/0e541e12-dcdd-4753-9607-282590cbd898-etcd-client\") pod \"etcd-operator-b45778765-2b47p\" (UID: \"0e541e12-dcdd-4753-9607-282590cbd898\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2b47p" Nov 24 01:15:07 crc 
kubenswrapper[4755]: I1124 01:15:07.565267 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kbjcr\" (UniqueName: \"kubernetes.io/projected/85fb9244-9754-4924-b53c-51ccbf6a5220-kube-api-access-kbjcr\") pod \"route-controller-manager-6576b87f9c-4gmv8\" (UID: \"85fb9244-9754-4924-b53c-51ccbf6a5220\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4gmv8" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.565288 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/06b2b494-6e14-4fd2-8fc8-5b491090adaa-image-import-ca\") pod \"apiserver-76f77b778f-vmwlz\" (UID: \"06b2b494-6e14-4fd2-8fc8-5b491090adaa\") " pod="openshift-apiserver/apiserver-76f77b778f-vmwlz" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.565310 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c55b3456-832f-47a0-90fc-383747422868-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-xtpxd\" (UID: \"c55b3456-832f-47a0-90fc-383747422868\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xtpxd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.565329 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c55b3456-832f-47a0-90fc-383747422868-service-ca-bundle\") pod \"authentication-operator-69f744f599-xtpxd\" (UID: \"c55b3456-832f-47a0-90fc-383747422868\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xtpxd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.565361 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/85fb9244-9754-4924-b53c-51ccbf6a5220-serving-cert\") pod \"route-controller-manager-6576b87f9c-4gmv8\" (UID: \"85fb9244-9754-4924-b53c-51ccbf6a5220\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4gmv8" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.565381 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/0e541e12-dcdd-4753-9607-282590cbd898-etcd-service-ca\") pod \"etcd-operator-b45778765-2b47p\" (UID: \"0e541e12-dcdd-4753-9607-282590cbd898\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2b47p" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.565402 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fxflz\" (UniqueName: \"kubernetes.io/projected/2eb0eb3e-5d9d-421e-b33e-253f66f88ea9-kube-api-access-fxflz\") pod \"openshift-controller-manager-operator-756b6f6bc6-7bwdn\" (UID: \"2eb0eb3e-5d9d-421e-b33e-253f66f88ea9\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7bwdn" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.565423 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6q2rk\" (UniqueName: \"kubernetes.io/projected/c55b3456-832f-47a0-90fc-383747422868-kube-api-access-6q2rk\") pod \"authentication-operator-69f744f599-xtpxd\" (UID: \"c55b3456-832f-47a0-90fc-383747422868\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xtpxd" Nov 24 
01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.565445 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1f1350c1-7f2f-4c43-9029-a33ab1eb24a8-serving-cert\") pod \"controller-manager-879f6c89f-2qz86\" (UID: \"1f1350c1-7f2f-4c43-9029-a33ab1eb24a8\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2qz86" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.565482 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2eb0eb3e-5d9d-421e-b33e-253f66f88ea9-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-7bwdn\" (UID: \"2eb0eb3e-5d9d-421e-b33e-253f66f88ea9\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7bwdn" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.565502 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3de6425b-7697-42d0-8b32-0a6e91078e9f-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-cslz2\" (UID: \"3de6425b-7697-42d0-8b32-0a6e91078e9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cslz2" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.565514 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-pnvtd\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.565522 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0a130010-6476-46e9-be91-f9040275be7b-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-tvwrx\" (UID: \"0a130010-6476-46e9-be91-f9040275be7b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-tvwrx" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.565543 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d274950d-dc6c-424c-b87e-a7b6e88b6092-serving-cert\") pod \"service-ca-operator-777779d784-xr447\" (UID: \"d274950d-dc6c-424c-b87e-a7b6e88b6092\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xr447" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.565563 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/65c5f11b-931e-4dc2-8c3e-c7180b94ec08-oauth-serving-cert\") pod \"console-f9d7485db-mlls8\" (UID: \"65c5f11b-931e-4dc2-8c3e-c7180b94ec08\") " pod="openshift-console/console-f9d7485db-mlls8" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.565583 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/06b2b494-6e14-4fd2-8fc8-5b491090adaa-audit\") pod \"apiserver-76f77b778f-vmwlz\" (UID: \"06b2b494-6e14-4fd2-8fc8-5b491090adaa\") " pod="openshift-apiserver/apiserver-76f77b778f-vmwlz" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.565622 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" 
(UniqueName: \"kubernetes.io/configmap/65c5f11b-931e-4dc2-8c3e-c7180b94ec08-trusted-ca-bundle\") pod \"console-f9d7485db-mlls8\" (UID: \"65c5f11b-931e-4dc2-8c3e-c7180b94ec08\") " pod="openshift-console/console-f9d7485db-mlls8" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.566451 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-pnvtd\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.568509 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3de6425b-7697-42d0-8b32-0a6e91078e9f-audit-dir\") pod \"apiserver-7bbb656c7d-cslz2\" (UID: \"3de6425b-7697-42d0-8b32-0a6e91078e9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cslz2" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.568553 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-audit-dir\") pod \"oauth-openshift-558db77b4-pnvtd\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.568576 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7zt9b\" (UniqueName: \"kubernetes.io/projected/d55c6dc8-108f-4f8c-a6cf-4c7e1363c9e4-kube-api-access-7zt9b\") pod \"downloads-7954f5f757-vnvq4\" (UID: \"d55c6dc8-108f-4f8c-a6cf-4c7e1363c9e4\") " pod="openshift-console/downloads-7954f5f757-vnvq4" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.568594 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/65c5f11b-931e-4dc2-8c3e-c7180b94ec08-service-ca\") pod \"console-f9d7485db-mlls8\" (UID: \"65c5f11b-931e-4dc2-8c3e-c7180b94ec08\") " pod="openshift-console/console-f9d7485db-mlls8" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.568633 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0e541e12-dcdd-4753-9607-282590cbd898-serving-cert\") pod \"etcd-operator-b45778765-2b47p\" (UID: \"0e541e12-dcdd-4753-9607-282590cbd898\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2b47p" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.568834 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/fceb51f9-deec-4840-86d0-a67228819bef-images\") pod \"machine-api-operator-5694c8668f-9f7kc\" (UID: \"fceb51f9-deec-4840-86d0-a67228819bef\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9f7kc" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.568863 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/3de6425b-7697-42d0-8b32-0a6e91078e9f-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-cslz2\" (UID: \"3de6425b-7697-42d0-8b32-0a6e91078e9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cslz2" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.568885 
4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nxptr\" (UniqueName: \"kubernetes.io/projected/0a130010-6476-46e9-be91-f9040275be7b-kube-api-access-nxptr\") pod \"cluster-image-registry-operator-dc59b4c8b-tvwrx\" (UID: \"0a130010-6476-46e9-be91-f9040275be7b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-tvwrx" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.568906 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fceb51f9-deec-4840-86d0-a67228819bef-config\") pod \"machine-api-operator-5694c8668f-9f7kc\" (UID: \"fceb51f9-deec-4840-86d0-a67228819bef\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9f7kc" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.568927 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1d3d34ab-42fa-4eaf-98fc-247a14f1231e-auth-proxy-config\") pod \"machine-approver-56656f9798-cnqln\" (UID: \"1d3d34ab-42fa-4eaf-98fc-247a14f1231e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cnqln" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.568946 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qbsp6\" (UniqueName: \"kubernetes.io/projected/1f1350c1-7f2f-4c43-9029-a33ab1eb24a8-kube-api-access-qbsp6\") pod \"controller-manager-879f6c89f-2qz86\" (UID: \"1f1350c1-7f2f-4c43-9029-a33ab1eb24a8\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2qz86" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.568964 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/85fb9244-9754-4924-b53c-51ccbf6a5220-client-ca\") pod \"route-controller-manager-6576b87f9c-4gmv8\" (UID: \"85fb9244-9754-4924-b53c-51ccbf6a5220\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4gmv8" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.568984 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/06b2b494-6e14-4fd2-8fc8-5b491090adaa-node-pullsecrets\") pod \"apiserver-76f77b778f-vmwlz\" (UID: \"06b2b494-6e14-4fd2-8fc8-5b491090adaa\") " pod="openshift-apiserver/apiserver-76f77b778f-vmwlz" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.568999 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2eb0eb3e-5d9d-421e-b33e-253f66f88ea9-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-7bwdn\" (UID: \"2eb0eb3e-5d9d-421e-b33e-253f66f88ea9\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7bwdn" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.569019 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f9511d1a-d4c3-48e5-add0-a09b3543f768-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-s445k\" (UID: \"f9511d1a-d4c3-48e5-add0-a09b3543f768\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s445k" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.569036 4755 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0a130010-6476-46e9-be91-f9040275be7b-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-tvwrx\" (UID: \"0a130010-6476-46e9-be91-f9040275be7b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-tvwrx" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.569052 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/644dc770-f6b6-4300-a800-642311c680bf-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-6zk9r\" (UID: \"644dc770-f6b6-4300-a800-642311c680bf\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6zk9r" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.569067 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k4xr5\" (UniqueName: \"kubernetes.io/projected/d274950d-dc6c-424c-b87e-a7b6e88b6092-kube-api-access-k4xr5\") pod \"service-ca-operator-777779d784-xr447\" (UID: \"d274950d-dc6c-424c-b87e-a7b6e88b6092\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xr447" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.569091 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-pnvtd\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.569107 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3de6425b-7697-42d0-8b32-0a6e91078e9f-etcd-client\") pod \"apiserver-7bbb656c7d-cslz2\" (UID: \"3de6425b-7697-42d0-8b32-0a6e91078e9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cslz2" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.569573 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/65c5f11b-931e-4dc2-8c3e-c7180b94ec08-trusted-ca-bundle\") pod \"console-f9d7485db-mlls8\" (UID: \"65c5f11b-931e-4dc2-8c3e-c7180b94ec08\") " pod="openshift-console/console-f9d7485db-mlls8" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.570905 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85fb9244-9754-4924-b53c-51ccbf6a5220-config\") pod \"route-controller-manager-6576b87f9c-4gmv8\" (UID: \"85fb9244-9754-4924-b53c-51ccbf6a5220\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4gmv8" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.571489 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e541e12-dcdd-4753-9607-282590cbd898-config\") pod \"etcd-operator-b45778765-2b47p\" (UID: \"0e541e12-dcdd-4753-9607-282590cbd898\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2b47p" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.572535 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: 
\"kubernetes.io/configmap/65c5f11b-931e-4dc2-8c3e-c7180b94ec08-oauth-serving-cert\") pod \"console-f9d7485db-mlls8\" (UID: \"65c5f11b-931e-4dc2-8c3e-c7180b94ec08\") " pod="openshift-console/console-f9d7485db-mlls8" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.573050 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-audit-dir\") pod \"oauth-openshift-558db77b4-pnvtd\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.573287 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/06b2b494-6e14-4fd2-8fc8-5b491090adaa-trusted-ca-bundle\") pod \"apiserver-76f77b778f-vmwlz\" (UID: \"06b2b494-6e14-4fd2-8fc8-5b491090adaa\") " pod="openshift-apiserver/apiserver-76f77b778f-vmwlz" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.573413 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/06b2b494-6e14-4fd2-8fc8-5b491090adaa-node-pullsecrets\") pod \"apiserver-76f77b778f-vmwlz\" (UID: \"06b2b494-6e14-4fd2-8fc8-5b491090adaa\") " pod="openshift-apiserver/apiserver-76f77b778f-vmwlz" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.573829 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/85fb9244-9754-4924-b53c-51ccbf6a5220-client-ca\") pod \"route-controller-manager-6576b87f9c-4gmv8\" (UID: \"85fb9244-9754-4924-b53c-51ccbf6a5220\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4gmv8" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.574294 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/65c5f11b-931e-4dc2-8c3e-c7180b94ec08-service-ca\") pod \"console-f9d7485db-mlls8\" (UID: \"65c5f11b-931e-4dc2-8c3e-c7180b94ec08\") " pod="openshift-console/console-f9d7485db-mlls8" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.574528 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fceb51f9-deec-4840-86d0-a67228819bef-config\") pod \"machine-api-operator-5694c8668f-9f7kc\" (UID: \"fceb51f9-deec-4840-86d0-a67228819bef\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9f7kc" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.574771 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/0e541e12-dcdd-4753-9607-282590cbd898-etcd-ca\") pod \"etcd-operator-b45778765-2b47p\" (UID: \"0e541e12-dcdd-4753-9607-282590cbd898\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2b47p" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.574894 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1f1350c1-7f2f-4c43-9029-a33ab1eb24a8-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-2qz86\" (UID: \"1f1350c1-7f2f-4c43-9029-a33ab1eb24a8\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2qz86" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.575160 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-pnvtd\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.575280 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-pnvtd\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.575396 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rnqzq\" (UniqueName: \"kubernetes.io/projected/0e541e12-dcdd-4753-9607-282590cbd898-kube-api-access-rnqzq\") pod \"etcd-operator-b45778765-2b47p\" (UID: \"0e541e12-dcdd-4753-9607-282590cbd898\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2b47p" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.575507 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/644dc770-f6b6-4300-a800-642311c680bf-config\") pod \"openshift-apiserver-operator-796bbdcf4f-6zk9r\" (UID: \"644dc770-f6b6-4300-a800-642311c680bf\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6zk9r" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.575630 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1f1350c1-7f2f-4c43-9029-a33ab1eb24a8-client-ca\") pod \"controller-manager-879f6c89f-2qz86\" (UID: \"1f1350c1-7f2f-4c43-9029-a33ab1eb24a8\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2qz86" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.575727 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-pnvtd\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.575858 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1d3d34ab-42fa-4eaf-98fc-247a14f1231e-auth-proxy-config\") pod \"machine-approver-56656f9798-cnqln\" (UID: \"1d3d34ab-42fa-4eaf-98fc-247a14f1231e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cnqln" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.575930 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/06b2b494-6e14-4fd2-8fc8-5b491090adaa-audit-dir\") pod \"apiserver-76f77b778f-vmwlz\" (UID: \"06b2b494-6e14-4fd2-8fc8-5b491090adaa\") " pod="openshift-apiserver/apiserver-76f77b778f-vmwlz" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.575078 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1d3d34ab-42fa-4eaf-98fc-247a14f1231e-config\") pod \"machine-approver-56656f9798-cnqln\" (UID: 
\"1d3d34ab-42fa-4eaf-98fc-247a14f1231e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cnqln" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.577423 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/fceb51f9-deec-4840-86d0-a67228819bef-images\") pod \"machine-api-operator-5694c8668f-9f7kc\" (UID: \"fceb51f9-deec-4840-86d0-a67228819bef\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9f7kc" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.577455 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-pnvtd\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.577522 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c55b3456-832f-47a0-90fc-383747422868-config\") pod \"authentication-operator-69f744f599-xtpxd\" (UID: \"c55b3456-832f-47a0-90fc-383747422868\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xtpxd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.577642 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brxfl\" (UniqueName: \"kubernetes.io/projected/fa143983-92e8-480e-9bb3-928892077000-kube-api-access-brxfl\") pod \"control-plane-machine-set-operator-78cbb6b69f-6qvk7\" (UID: \"fa143983-92e8-480e-9bb3-928892077000\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6qvk7" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.577654 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/06b2b494-6e14-4fd2-8fc8-5b491090adaa-image-import-ca\") pod \"apiserver-76f77b778f-vmwlz\" (UID: \"06b2b494-6e14-4fd2-8fc8-5b491090adaa\") " pod="openshift-apiserver/apiserver-76f77b778f-vmwlz" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.577677 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-pnvtd\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.577734 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f9511d1a-d4c3-48e5-add0-a09b3543f768-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-s445k\" (UID: \"f9511d1a-d4c3-48e5-add0-a09b3543f768\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s445k" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.577760 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/47cac8d4-7293-45b3-9142-7717f79f973b-metrics-tls\") pod \"dns-operator-744455d44c-snkpv\" (UID: \"47cac8d4-7293-45b3-9142-7717f79f973b\") " 
pod="openshift-dns-operator/dns-operator-744455d44c-snkpv" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.577846 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-pnvtd\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.577908 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-pnvtd\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.577957 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/06b2b494-6e14-4fd2-8fc8-5b491090adaa-etcd-client\") pod \"apiserver-76f77b778f-vmwlz\" (UID: \"06b2b494-6e14-4fd2-8fc8-5b491090adaa\") " pod="openshift-apiserver/apiserver-76f77b778f-vmwlz" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.577987 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/06b2b494-6e14-4fd2-8fc8-5b491090adaa-encryption-config\") pod \"apiserver-76f77b778f-vmwlz\" (UID: \"06b2b494-6e14-4fd2-8fc8-5b491090adaa\") " pod="openshift-apiserver/apiserver-76f77b778f-vmwlz" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.578038 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5lkn\" (UniqueName: \"kubernetes.io/projected/3de6425b-7697-42d0-8b32-0a6e91078e9f-kube-api-access-l5lkn\") pod \"apiserver-7bbb656c7d-cslz2\" (UID: \"3de6425b-7697-42d0-8b32-0a6e91078e9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cslz2" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.578067 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-plwm4\" (UniqueName: \"kubernetes.io/projected/47cac8d4-7293-45b3-9142-7717f79f973b-kube-api-access-plwm4\") pod \"dns-operator-744455d44c-snkpv\" (UID: \"47cac8d4-7293-45b3-9142-7717f79f973b\") " pod="openshift-dns-operator/dns-operator-744455d44c-snkpv" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.579073 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f1350c1-7f2f-4c43-9029-a33ab1eb24a8-config\") pod \"controller-manager-879f6c89f-2qz86\" (UID: \"1f1350c1-7f2f-4c43-9029-a33ab1eb24a8\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2qz86" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.579374 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.580055 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2eb0eb3e-5d9d-421e-b33e-253f66f88ea9-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-7bwdn\" (UID: \"2eb0eb3e-5d9d-421e-b33e-253f66f88ea9\") " 
pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7bwdn" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.580566 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/65c5f11b-931e-4dc2-8c3e-c7180b94ec08-console-config\") pod \"console-f9d7485db-mlls8\" (UID: \"65c5f11b-931e-4dc2-8c3e-c7180b94ec08\") " pod="openshift-console/console-f9d7485db-mlls8" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.581375 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m22w7\" (UniqueName: \"kubernetes.io/projected/65c5f11b-931e-4dc2-8c3e-c7180b94ec08-kube-api-access-m22w7\") pod \"console-f9d7485db-mlls8\" (UID: \"65c5f11b-931e-4dc2-8c3e-c7180b94ec08\") " pod="openshift-console/console-f9d7485db-mlls8" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.581429 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/06b2b494-6e14-4fd2-8fc8-5b491090adaa-etcd-serving-ca\") pod \"apiserver-76f77b778f-vmwlz\" (UID: \"06b2b494-6e14-4fd2-8fc8-5b491090adaa\") " pod="openshift-apiserver/apiserver-76f77b778f-vmwlz" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.581578 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-audit-policies\") pod \"oauth-openshift-558db77b4-pnvtd\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.581633 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/06b2b494-6e14-4fd2-8fc8-5b491090adaa-config\") pod \"apiserver-76f77b778f-vmwlz\" (UID: \"06b2b494-6e14-4fd2-8fc8-5b491090adaa\") " pod="openshift-apiserver/apiserver-76f77b778f-vmwlz" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.581661 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/77b33d59-46dc-45ae-9ded-cbd3918f79fd-proxy-tls\") pod \"machine-config-operator-74547568cd-zjcfn\" (UID: \"77b33d59-46dc-45ae-9ded-cbd3918f79fd\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zjcfn" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.581707 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/77b33d59-46dc-45ae-9ded-cbd3918f79fd-images\") pod \"machine-config-operator-74547568cd-zjcfn\" (UID: \"77b33d59-46dc-45ae-9ded-cbd3918f79fd\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zjcfn" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.581728 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/77b33d59-46dc-45ae-9ded-cbd3918f79fd-auth-proxy-config\") pod \"machine-config-operator-74547568cd-zjcfn\" (UID: \"77b33d59-46dc-45ae-9ded-cbd3918f79fd\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zjcfn" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.581756 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"kube-api-access-5c7sh\" (UniqueName: \"kubernetes.io/projected/644dc770-f6b6-4300-a800-642311c680bf-kube-api-access-5c7sh\") pod \"openshift-apiserver-operator-796bbdcf4f-6zk9r\" (UID: \"644dc770-f6b6-4300-a800-642311c680bf\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6zk9r" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.582282 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7bwdn"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.582667 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/06b2b494-6e14-4fd2-8fc8-5b491090adaa-etcd-serving-ca\") pod \"apiserver-76f77b778f-vmwlz\" (UID: \"06b2b494-6e14-4fd2-8fc8-5b491090adaa\") " pod="openshift-apiserver/apiserver-76f77b778f-vmwlz" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.582922 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/06b2b494-6e14-4fd2-8fc8-5b491090adaa-config\") pod \"apiserver-76f77b778f-vmwlz\" (UID: \"06b2b494-6e14-4fd2-8fc8-5b491090adaa\") " pod="openshift-apiserver/apiserver-76f77b778f-vmwlz" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.583080 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-audit-policies\") pod \"oauth-openshift-558db77b4-pnvtd\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.584015 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/0e541e12-dcdd-4753-9607-282590cbd898-etcd-service-ca\") pod \"etcd-operator-b45778765-2b47p\" (UID: \"0e541e12-dcdd-4753-9607-282590cbd898\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2b47p" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.584175 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1f1350c1-7f2f-4c43-9029-a33ab1eb24a8-client-ca\") pod \"controller-manager-879f6c89f-2qz86\" (UID: \"1f1350c1-7f2f-4c43-9029-a33ab1eb24a8\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2qz86" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.584578 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-pnvtd\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.585256 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/06b2b494-6e14-4fd2-8fc8-5b491090adaa-etcd-client\") pod \"apiserver-76f77b778f-vmwlz\" (UID: \"06b2b494-6e14-4fd2-8fc8-5b491090adaa\") " pod="openshift-apiserver/apiserver-76f77b778f-vmwlz" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.585376 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2eb0eb3e-5d9d-421e-b33e-253f66f88ea9-serving-cert\") 
pod \"openshift-controller-manager-operator-756b6f6bc6-7bwdn\" (UID: \"2eb0eb3e-5d9d-421e-b33e-253f66f88ea9\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7bwdn" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.586017 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/0e541e12-dcdd-4753-9607-282590cbd898-etcd-client\") pod \"etcd-operator-b45778765-2b47p\" (UID: \"0e541e12-dcdd-4753-9607-282590cbd898\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2b47p" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.586596 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-pnvtd\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.586655 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-vnvq4"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.587199 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/06b2b494-6e14-4fd2-8fc8-5b491090adaa-encryption-config\") pod \"apiserver-76f77b778f-vmwlz\" (UID: \"06b2b494-6e14-4fd2-8fc8-5b491090adaa\") " pod="openshift-apiserver/apiserver-76f77b778f-vmwlz" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.587334 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1f1350c1-7f2f-4c43-9029-a33ab1eb24a8-serving-cert\") pod \"controller-manager-879f6c89f-2qz86\" (UID: \"1f1350c1-7f2f-4c43-9029-a33ab1eb24a8\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2qz86" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.587770 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/06b2b494-6e14-4fd2-8fc8-5b491090adaa-audit\") pod \"apiserver-76f77b778f-vmwlz\" (UID: \"06b2b494-6e14-4fd2-8fc8-5b491090adaa\") " pod="openshift-apiserver/apiserver-76f77b778f-vmwlz" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.589128 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-pnvtd\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.589397 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-pnvtd\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.589812 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/1d3d34ab-42fa-4eaf-98fc-247a14f1231e-machine-approver-tls\") pod 
\"machine-approver-56656f9798-cnqln\" (UID: \"1d3d34ab-42fa-4eaf-98fc-247a14f1231e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cnqln" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.589920 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/fceb51f9-deec-4840-86d0-a67228819bef-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-9f7kc\" (UID: \"fceb51f9-deec-4840-86d0-a67228819bef\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9f7kc" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.590074 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-pnvtd\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.590151 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/85fb9244-9754-4924-b53c-51ccbf6a5220-serving-cert\") pod \"route-controller-manager-6576b87f9c-4gmv8\" (UID: \"85fb9244-9754-4924-b53c-51ccbf6a5220\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4gmv8" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.590644 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/06b2b494-6e14-4fd2-8fc8-5b491090adaa-serving-cert\") pod \"apiserver-76f77b778f-vmwlz\" (UID: \"06b2b494-6e14-4fd2-8fc8-5b491090adaa\") " pod="openshift-apiserver/apiserver-76f77b778f-vmwlz" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.590689 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.591000 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-2b47p"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.591090 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0e541e12-dcdd-4753-9607-282590cbd898-serving-cert\") pod \"etcd-operator-b45778765-2b47p\" (UID: \"0e541e12-dcdd-4753-9607-282590cbd898\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2b47p" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.591331 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/65c5f11b-931e-4dc2-8c3e-c7180b94ec08-console-oauth-config\") pod \"console-f9d7485db-mlls8\" (UID: \"65c5f11b-931e-4dc2-8c3e-c7180b94ec08\") " pod="openshift-console/console-f9d7485db-mlls8" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.592444 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-p4sx7"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.593584 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b7wq9"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.594922 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-gdtf7"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.596232 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-trrhl"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.597662 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-vmwlz"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.603541 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-xtvfv"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.603629 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-qtkb8"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.603644 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-vxz4v"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.610202 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-pnvtd\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.610296 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-snkpv"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.610331 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-xtpxd"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.610346 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-mlls8"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.610999 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-pnvtd\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.611237 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/65c5f11b-931e-4dc2-8c3e-c7180b94ec08-console-serving-cert\") pod \"console-f9d7485db-mlls8\" (UID: \"65c5f11b-931e-4dc2-8c3e-c7180b94ec08\") " pod="openshift-console/console-f9d7485db-mlls8" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.613069 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.616402 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-tvwrx"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.618651 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4hrgd"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.619695 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-pnvtd\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.621715 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6qvk7"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.624675 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-mzvv7"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.626055 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-mzvv7" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.628921 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7tgn4"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.630776 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-xr447"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.631521 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.632646 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-x78dk"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.634646 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-9f7kc"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.636645 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-hqh6f"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.638073 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-57p55"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.639559 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-cslz2"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.640759 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s445k"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.645651 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-h4hv2"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.648064 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw2tr"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.649806 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6zk9r"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.651011 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.653505 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-rxc25"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.655452 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-zjcfn"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.656867 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29399115-rt4z5"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.658021 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2v27w"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.660365 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-mzvv7"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.661385 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-l6tpb"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.662052 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-l6tpb" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.664386 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-rn5m4"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.666899 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-mqx68"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.667974 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-mqx68" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.668577 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-g92v4"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.669271 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-g92v4" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.670612 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.670914 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-mqx68"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.672504 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-g92v4"] Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.682203 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/77b33d59-46dc-45ae-9ded-cbd3918f79fd-proxy-tls\") pod \"machine-config-operator-74547568cd-zjcfn\" (UID: \"77b33d59-46dc-45ae-9ded-cbd3918f79fd\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zjcfn" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.682232 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/77b33d59-46dc-45ae-9ded-cbd3918f79fd-images\") pod \"machine-config-operator-74547568cd-zjcfn\" (UID: \"77b33d59-46dc-45ae-9ded-cbd3918f79fd\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zjcfn" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.682251 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/77b33d59-46dc-45ae-9ded-cbd3918f79fd-auth-proxy-config\") pod \"machine-config-operator-74547568cd-zjcfn\" (UID: \"77b33d59-46dc-45ae-9ded-cbd3918f79fd\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zjcfn" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.682269 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5c7sh\" (UniqueName: \"kubernetes.io/projected/644dc770-f6b6-4300-a800-642311c680bf-kube-api-access-5c7sh\") pod \"openshift-apiserver-operator-796bbdcf4f-6zk9r\" (UID: \"644dc770-f6b6-4300-a800-642311c680bf\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6zk9r" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.682302 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/fa143983-92e8-480e-9bb3-928892077000-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-6qvk7\" (UID: \"fa143983-92e8-480e-9bb3-928892077000\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6qvk7" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.682326 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d274950d-dc6c-424c-b87e-a7b6e88b6092-config\") pod \"service-ca-operator-777779d784-xr447\" (UID: \"d274950d-dc6c-424c-b87e-a7b6e88b6092\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xr447" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.682360 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mxp77\" (UniqueName: \"kubernetes.io/projected/77b33d59-46dc-45ae-9ded-cbd3918f79fd-kube-api-access-mxp77\") pod 
\"machine-config-operator-74547568cd-zjcfn\" (UID: \"77b33d59-46dc-45ae-9ded-cbd3918f79fd\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zjcfn" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.682376 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3de6425b-7697-42d0-8b32-0a6e91078e9f-serving-cert\") pod \"apiserver-7bbb656c7d-cslz2\" (UID: \"3de6425b-7697-42d0-8b32-0a6e91078e9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cslz2" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.682391 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/3de6425b-7697-42d0-8b32-0a6e91078e9f-encryption-config\") pod \"apiserver-7bbb656c7d-cslz2\" (UID: \"3de6425b-7697-42d0-8b32-0a6e91078e9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cslz2" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.682407 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f9511d1a-d4c3-48e5-add0-a09b3543f768-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-s445k\" (UID: \"f9511d1a-d4c3-48e5-add0-a09b3543f768\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s445k" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.682431 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/0a130010-6476-46e9-be91-f9040275be7b-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-tvwrx\" (UID: \"0a130010-6476-46e9-be91-f9040275be7b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-tvwrx" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.682448 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c55b3456-832f-47a0-90fc-383747422868-serving-cert\") pod \"authentication-operator-69f744f599-xtpxd\" (UID: \"c55b3456-832f-47a0-90fc-383747422868\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xtpxd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.682462 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/3de6425b-7697-42d0-8b32-0a6e91078e9f-audit-policies\") pod \"apiserver-7bbb656c7d-cslz2\" (UID: \"3de6425b-7697-42d0-8b32-0a6e91078e9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cslz2" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.682502 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c55b3456-832f-47a0-90fc-383747422868-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-xtpxd\" (UID: \"c55b3456-832f-47a0-90fc-383747422868\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xtpxd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.682517 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c55b3456-832f-47a0-90fc-383747422868-service-ca-bundle\") pod \"authentication-operator-69f744f599-xtpxd\" (UID: \"c55b3456-832f-47a0-90fc-383747422868\") " 
pod="openshift-authentication-operator/authentication-operator-69f744f599-xtpxd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.682531 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6q2rk\" (UniqueName: \"kubernetes.io/projected/c55b3456-832f-47a0-90fc-383747422868-kube-api-access-6q2rk\") pod \"authentication-operator-69f744f599-xtpxd\" (UID: \"c55b3456-832f-47a0-90fc-383747422868\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xtpxd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.682547 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3de6425b-7697-42d0-8b32-0a6e91078e9f-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-cslz2\" (UID: \"3de6425b-7697-42d0-8b32-0a6e91078e9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cslz2" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.682562 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0a130010-6476-46e9-be91-f9040275be7b-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-tvwrx\" (UID: \"0a130010-6476-46e9-be91-f9040275be7b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-tvwrx" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.682578 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d274950d-dc6c-424c-b87e-a7b6e88b6092-serving-cert\") pod \"service-ca-operator-777779d784-xr447\" (UID: \"d274950d-dc6c-424c-b87e-a7b6e88b6092\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xr447" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.682594 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3de6425b-7697-42d0-8b32-0a6e91078e9f-audit-dir\") pod \"apiserver-7bbb656c7d-cslz2\" (UID: \"3de6425b-7697-42d0-8b32-0a6e91078e9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cslz2" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.682644 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/3de6425b-7697-42d0-8b32-0a6e91078e9f-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-cslz2\" (UID: \"3de6425b-7697-42d0-8b32-0a6e91078e9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cslz2" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.682665 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nxptr\" (UniqueName: \"kubernetes.io/projected/0a130010-6476-46e9-be91-f9040275be7b-kube-api-access-nxptr\") pod \"cluster-image-registry-operator-dc59b4c8b-tvwrx\" (UID: \"0a130010-6476-46e9-be91-f9040275be7b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-tvwrx" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.682695 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f9511d1a-d4c3-48e5-add0-a09b3543f768-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-s445k\" (UID: \"f9511d1a-d4c3-48e5-add0-a09b3543f768\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s445k" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.682712 4755 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0a130010-6476-46e9-be91-f9040275be7b-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-tvwrx\" (UID: \"0a130010-6476-46e9-be91-f9040275be7b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-tvwrx" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.682728 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/644dc770-f6b6-4300-a800-642311c680bf-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-6zk9r\" (UID: \"644dc770-f6b6-4300-a800-642311c680bf\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6zk9r" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.682745 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k4xr5\" (UniqueName: \"kubernetes.io/projected/d274950d-dc6c-424c-b87e-a7b6e88b6092-kube-api-access-k4xr5\") pod \"service-ca-operator-777779d784-xr447\" (UID: \"d274950d-dc6c-424c-b87e-a7b6e88b6092\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xr447" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.682760 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3de6425b-7697-42d0-8b32-0a6e91078e9f-etcd-client\") pod \"apiserver-7bbb656c7d-cslz2\" (UID: \"3de6425b-7697-42d0-8b32-0a6e91078e9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cslz2" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.682789 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/644dc770-f6b6-4300-a800-642311c680bf-config\") pod \"openshift-apiserver-operator-796bbdcf4f-6zk9r\" (UID: \"644dc770-f6b6-4300-a800-642311c680bf\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6zk9r" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.682807 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c55b3456-832f-47a0-90fc-383747422868-config\") pod \"authentication-operator-69f744f599-xtpxd\" (UID: \"c55b3456-832f-47a0-90fc-383747422868\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xtpxd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.682821 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brxfl\" (UniqueName: \"kubernetes.io/projected/fa143983-92e8-480e-9bb3-928892077000-kube-api-access-brxfl\") pod \"control-plane-machine-set-operator-78cbb6b69f-6qvk7\" (UID: \"fa143983-92e8-480e-9bb3-928892077000\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6qvk7" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.682836 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f9511d1a-d4c3-48e5-add0-a09b3543f768-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-s445k\" (UID: \"f9511d1a-d4c3-48e5-add0-a09b3543f768\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s445k" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.682851 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"metrics-tls\" (UniqueName: \"kubernetes.io/secret/47cac8d4-7293-45b3-9142-7717f79f973b-metrics-tls\") pod \"dns-operator-744455d44c-snkpv\" (UID: \"47cac8d4-7293-45b3-9142-7717f79f973b\") " pod="openshift-dns-operator/dns-operator-744455d44c-snkpv" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.682871 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5lkn\" (UniqueName: \"kubernetes.io/projected/3de6425b-7697-42d0-8b32-0a6e91078e9f-kube-api-access-l5lkn\") pod \"apiserver-7bbb656c7d-cslz2\" (UID: \"3de6425b-7697-42d0-8b32-0a6e91078e9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cslz2" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.682886 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-plwm4\" (UniqueName: \"kubernetes.io/projected/47cac8d4-7293-45b3-9142-7717f79f973b-kube-api-access-plwm4\") pod \"dns-operator-744455d44c-snkpv\" (UID: \"47cac8d4-7293-45b3-9142-7717f79f973b\") " pod="openshift-dns-operator/dns-operator-744455d44c-snkpv" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.682971 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/77b33d59-46dc-45ae-9ded-cbd3918f79fd-auth-proxy-config\") pod \"machine-config-operator-74547568cd-zjcfn\" (UID: \"77b33d59-46dc-45ae-9ded-cbd3918f79fd\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zjcfn" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.683667 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/3de6425b-7697-42d0-8b32-0a6e91078e9f-audit-policies\") pod \"apiserver-7bbb656c7d-cslz2\" (UID: \"3de6425b-7697-42d0-8b32-0a6e91078e9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cslz2" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.683732 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3de6425b-7697-42d0-8b32-0a6e91078e9f-audit-dir\") pod \"apiserver-7bbb656c7d-cslz2\" (UID: \"3de6425b-7697-42d0-8b32-0a6e91078e9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cslz2" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.684129 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3de6425b-7697-42d0-8b32-0a6e91078e9f-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-cslz2\" (UID: \"3de6425b-7697-42d0-8b32-0a6e91078e9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cslz2" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.684342 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/3de6425b-7697-42d0-8b32-0a6e91078e9f-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-cslz2\" (UID: \"3de6425b-7697-42d0-8b32-0a6e91078e9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cslz2" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.684412 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/644dc770-f6b6-4300-a800-642311c680bf-config\") pod \"openshift-apiserver-operator-796bbdcf4f-6zk9r\" (UID: \"644dc770-f6b6-4300-a800-642311c680bf\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6zk9r" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 
01:15:07.685252 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3de6425b-7697-42d0-8b32-0a6e91078e9f-serving-cert\") pod \"apiserver-7bbb656c7d-cslz2\" (UID: \"3de6425b-7697-42d0-8b32-0a6e91078e9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cslz2" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.685303 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0a130010-6476-46e9-be91-f9040275be7b-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-tvwrx\" (UID: \"0a130010-6476-46e9-be91-f9040275be7b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-tvwrx" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.686106 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/3de6425b-7697-42d0-8b32-0a6e91078e9f-encryption-config\") pod \"apiserver-7bbb656c7d-cslz2\" (UID: \"3de6425b-7697-42d0-8b32-0a6e91078e9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cslz2" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.686252 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/0a130010-6476-46e9-be91-f9040275be7b-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-tvwrx\" (UID: \"0a130010-6476-46e9-be91-f9040275be7b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-tvwrx" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.686691 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/644dc770-f6b6-4300-a800-642311c680bf-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-6zk9r\" (UID: \"644dc770-f6b6-4300-a800-642311c680bf\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6zk9r" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.687094 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/47cac8d4-7293-45b3-9142-7717f79f973b-metrics-tls\") pod \"dns-operator-744455d44c-snkpv\" (UID: \"47cac8d4-7293-45b3-9142-7717f79f973b\") " pod="openshift-dns-operator/dns-operator-744455d44c-snkpv" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.689294 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3de6425b-7697-42d0-8b32-0a6e91078e9f-etcd-client\") pod \"apiserver-7bbb656c7d-cslz2\" (UID: \"3de6425b-7697-42d0-8b32-0a6e91078e9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cslz2" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.691224 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.697503 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c55b3456-832f-47a0-90fc-383747422868-serving-cert\") pod \"authentication-operator-69f744f599-xtpxd\" (UID: \"c55b3456-832f-47a0-90fc-383747422868\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xtpxd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.710756 4755 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-authentication-operator"/"authentication-operator-config" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.714518 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c55b3456-832f-47a0-90fc-383747422868-config\") pod \"authentication-operator-69f744f599-xtpxd\" (UID: \"c55b3456-832f-47a0-90fc-383747422868\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xtpxd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.737960 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.745746 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c55b3456-832f-47a0-90fc-383747422868-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-xtpxd\" (UID: \"c55b3456-832f-47a0-90fc-383747422868\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xtpxd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.750958 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.754206 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c55b3456-832f-47a0-90fc-383747422868-service-ca-bundle\") pod \"authentication-operator-69f744f599-xtpxd\" (UID: \"c55b3456-832f-47a0-90fc-383747422868\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xtpxd" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.771494 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.790990 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.797131 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/fa143983-92e8-480e-9bb3-928892077000-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-6qvk7\" (UID: \"fa143983-92e8-480e-9bb3-928892077000\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6qvk7" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.811597 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.832276 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.851496 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.871669 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.891561 4755 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-operator-lifecycle-manager"/"pprof-cert" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.910975 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.930847 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.950873 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.972285 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.978270 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f9511d1a-d4c3-48e5-add0-a09b3543f768-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-s445k\" (UID: \"f9511d1a-d4c3-48e5-add0-a09b3543f768\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s445k" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.991317 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.994394 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f9511d1a-d4c3-48e5-add0-a09b3543f768-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-s445k\" (UID: \"f9511d1a-d4c3-48e5-add0-a09b3543f768\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s445k" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.996260 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:15:07 crc kubenswrapper[4755]: I1124 01:15:07.996314 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.010807 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.031883 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.051431 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.081739 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.092040 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.111640 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.113759 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/77b33d59-46dc-45ae-9ded-cbd3918f79fd-images\") pod \"machine-config-operator-74547568cd-zjcfn\" (UID: \"77b33d59-46dc-45ae-9ded-cbd3918f79fd\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zjcfn" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.131506 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.151389 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.171987 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.176537 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/77b33d59-46dc-45ae-9ded-cbd3918f79fd-proxy-tls\") pod \"machine-config-operator-74547568cd-zjcfn\" (UID: \"77b33d59-46dc-45ae-9ded-cbd3918f79fd\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zjcfn" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.210866 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.231178 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.238576 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d274950d-dc6c-424c-b87e-a7b6e88b6092-serving-cert\") pod \"service-ca-operator-777779d784-xr447\" (UID: \"d274950d-dc6c-424c-b87e-a7b6e88b6092\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xr447" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.251794 4755 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.272128 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.291301 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.294390 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d274950d-dc6c-424c-b87e-a7b6e88b6092-config\") pod \"service-ca-operator-777779d784-xr447\" (UID: \"d274950d-dc6c-424c-b87e-a7b6e88b6092\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xr447" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.331862 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.351798 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.371848 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.390832 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.411274 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.431417 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.454589 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.471812 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.490776 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.510957 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.531828 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.549939 4755 request.go:700] Waited for 1.000988139s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/configmaps?fieldSelector=metadata.name%3Dkube-root-ca.crt&limit=500&resourceVersion=0 Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.551816 4755 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.579446 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.590773 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.611355 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.631992 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.650940 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.671434 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.691374 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.710504 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.731196 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.751783 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.772683 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.790971 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.811548 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.831256 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.851768 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.871174 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.891941 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.913765 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Nov 24 
01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.931925 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.952458 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.972077 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Nov 24 01:15:08 crc kubenswrapper[4755]: I1124 01:15:08.991112 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.012313 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.032077 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.051570 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.071442 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.092900 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.112003 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.131938 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.180895 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-srp6c\" (UniqueName: \"kubernetes.io/projected/06b2b494-6e14-4fd2-8fc8-5b491090adaa-kube-api-access-srp6c\") pod \"apiserver-76f77b778f-vmwlz\" (UID: \"06b2b494-6e14-4fd2-8fc8-5b491090adaa\") " pod="openshift-apiserver/apiserver-76f77b778f-vmwlz" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.199500 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-474ng\" (UniqueName: \"kubernetes.io/projected/1d3d34ab-42fa-4eaf-98fc-247a14f1231e-kube-api-access-474ng\") pod \"machine-approver-56656f9798-cnqln\" (UID: \"1d3d34ab-42fa-4eaf-98fc-247a14f1231e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cnqln" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.215218 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5wzj\" (UniqueName: \"kubernetes.io/projected/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-kube-api-access-r5wzj\") pod \"oauth-openshift-558db77b4-pnvtd\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.228641 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-klggk\" (UniqueName: \"kubernetes.io/projected/fceb51f9-deec-4840-86d0-a67228819bef-kube-api-access-klggk\") pod 
\"machine-api-operator-5694c8668f-9f7kc\" (UID: \"fceb51f9-deec-4840-86d0-a67228819bef\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9f7kc" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.241200 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.249016 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-9f7kc" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.252420 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kbjcr\" (UniqueName: \"kubernetes.io/projected/85fb9244-9754-4924-b53c-51ccbf6a5220-kube-api-access-kbjcr\") pod \"route-controller-manager-6576b87f9c-4gmv8\" (UID: \"85fb9244-9754-4924-b53c-51ccbf6a5220\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4gmv8" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.269887 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7zt9b\" (UniqueName: \"kubernetes.io/projected/d55c6dc8-108f-4f8c-a6cf-4c7e1363c9e4-kube-api-access-7zt9b\") pod \"downloads-7954f5f757-vnvq4\" (UID: \"d55c6dc8-108f-4f8c-a6cf-4c7e1363c9e4\") " pod="openshift-console/downloads-7954f5f757-vnvq4" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.286093 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-vmwlz" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.287568 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qbsp6\" (UniqueName: \"kubernetes.io/projected/1f1350c1-7f2f-4c43-9029-a33ab1eb24a8-kube-api-access-qbsp6\") pod \"controller-manager-879f6c89f-2qz86\" (UID: \"1f1350c1-7f2f-4c43-9029-a33ab1eb24a8\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2qz86" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.306068 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rnqzq\" (UniqueName: \"kubernetes.io/projected/0e541e12-dcdd-4753-9607-282590cbd898-kube-api-access-rnqzq\") pod \"etcd-operator-b45778765-2b47p\" (UID: \"0e541e12-dcdd-4753-9607-282590cbd898\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2b47p" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.330269 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cnqln" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.331419 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fxflz\" (UniqueName: \"kubernetes.io/projected/2eb0eb3e-5d9d-421e-b33e-253f66f88ea9-kube-api-access-fxflz\") pod \"openshift-controller-manager-operator-756b6f6bc6-7bwdn\" (UID: \"2eb0eb3e-5d9d-421e-b33e-253f66f88ea9\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7bwdn" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.342309 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7bwdn" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.353104 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.353472 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m22w7\" (UniqueName: \"kubernetes.io/projected/65c5f11b-931e-4dc2-8c3e-c7180b94ec08-kube-api-access-m22w7\") pod \"console-f9d7485db-mlls8\" (UID: \"65c5f11b-931e-4dc2-8c3e-c7180b94ec08\") " pod="openshift-console/console-f9d7485db-mlls8" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.359270 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-mlls8" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.371460 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.373478 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-2b47p" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.392096 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.412107 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.431784 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.451713 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.478986 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4gmv8" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.479214 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.482160 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-9f7kc"] Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.491338 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.491900 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-2qz86" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.511429 4755 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.532102 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.551027 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.561452 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-vnvq4" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.569785 4755 request.go:700] Waited for 1.900290462s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-dns/secrets?fieldSelector=metadata.name%3Ddns-default-metrics-tls&limit=500&resourceVersion=0 Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.571696 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.592736 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.632494 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5c7sh\" (UniqueName: \"kubernetes.io/projected/644dc770-f6b6-4300-a800-642311c680bf-kube-api-access-5c7sh\") pod \"openshift-apiserver-operator-796bbdcf4f-6zk9r\" (UID: \"644dc770-f6b6-4300-a800-642311c680bf\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6zk9r" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.646076 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cnqln" event={"ID":"1d3d34ab-42fa-4eaf-98fc-247a14f1231e","Type":"ContainerStarted","Data":"2c506b5a7c787b018d410da1ed41544a5e375bdf4a67057c65c0368107af6d9c"} Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.647555 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-9f7kc" event={"ID":"fceb51f9-deec-4840-86d0-a67228819bef","Type":"ContainerStarted","Data":"c03d5f125e511f48424b246e1afaae543a432464334e7e89b4fd3dafbfe6daee"} Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.660276 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mxp77\" (UniqueName: \"kubernetes.io/projected/77b33d59-46dc-45ae-9ded-cbd3918f79fd-kube-api-access-mxp77\") pod \"machine-config-operator-74547568cd-zjcfn\" (UID: \"77b33d59-46dc-45ae-9ded-cbd3918f79fd\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zjcfn" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.669322 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f9511d1a-d4c3-48e5-add0-a09b3543f768-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-s445k\" (UID: \"f9511d1a-d4c3-48e5-add0-a09b3543f768\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s445k" Nov 24 01:15:09 crc 
kubenswrapper[4755]: I1124 01:15:09.693883 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-plwm4\" (UniqueName: \"kubernetes.io/projected/47cac8d4-7293-45b3-9142-7717f79f973b-kube-api-access-plwm4\") pod \"dns-operator-744455d44c-snkpv\" (UID: \"47cac8d4-7293-45b3-9142-7717f79f973b\") " pod="openshift-dns-operator/dns-operator-744455d44c-snkpv" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.704540 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-vmwlz"] Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.710388 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-pnvtd"] Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.717093 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nxptr\" (UniqueName: \"kubernetes.io/projected/0a130010-6476-46e9-be91-f9040275be7b-kube-api-access-nxptr\") pod \"cluster-image-registry-operator-dc59b4c8b-tvwrx\" (UID: \"0a130010-6476-46e9-be91-f9040275be7b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-tvwrx" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.725211 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brxfl\" (UniqueName: \"kubernetes.io/projected/fa143983-92e8-480e-9bb3-928892077000-kube-api-access-brxfl\") pod \"control-plane-machine-set-operator-78cbb6b69f-6qvk7\" (UID: \"fa143983-92e8-480e-9bb3-928892077000\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6qvk7" Nov 24 01:15:09 crc kubenswrapper[4755]: W1124 01:15:09.728174 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod06b2b494_6e14_4fd2_8fc8_5b491090adaa.slice/crio-1a4b917b1f2b4a6cbf9ca3be656100dea2330c7c46d0d1e84c1dbff09fdb87b6 WatchSource:0}: Error finding container 1a4b917b1f2b4a6cbf9ca3be656100dea2330c7c46d0d1e84c1dbff09fdb87b6: Status 404 returned error can't find the container with id 1a4b917b1f2b4a6cbf9ca3be656100dea2330c7c46d0d1e84c1dbff09fdb87b6 Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.745831 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5lkn\" (UniqueName: \"kubernetes.io/projected/3de6425b-7697-42d0-8b32-0a6e91078e9f-kube-api-access-l5lkn\") pod \"apiserver-7bbb656c7d-cslz2\" (UID: \"3de6425b-7697-42d0-8b32-0a6e91078e9f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cslz2" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.748229 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6zk9r" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.749252 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-2qz86"] Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.765292 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0a130010-6476-46e9-be91-f9040275be7b-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-tvwrx\" (UID: \"0a130010-6476-46e9-be91-f9040275be7b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-tvwrx" Nov 24 01:15:09 crc kubenswrapper[4755]: W1124 01:15:09.766167 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1f1350c1_7f2f_4c43_9029_a33ab1eb24a8.slice/crio-5bea381c248e511e39c6c0529bbbc38a26970d8a045ed4106626230c7e561b0d WatchSource:0}: Error finding container 5bea381c248e511e39c6c0529bbbc38a26970d8a045ed4106626230c7e561b0d: Status 404 returned error can't find the container with id 5bea381c248e511e39c6c0529bbbc38a26970d8a045ed4106626230c7e561b0d Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.775849 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-snkpv" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.781984 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cslz2" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.784458 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6q2rk\" (UniqueName: \"kubernetes.io/projected/c55b3456-832f-47a0-90fc-383747422868-kube-api-access-6q2rk\") pod \"authentication-operator-69f744f599-xtpxd\" (UID: \"c55b3456-832f-47a0-90fc-383747422868\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xtpxd" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.791085 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-mlls8"] Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.804455 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k4xr5\" (UniqueName: \"kubernetes.io/projected/d274950d-dc6c-424c-b87e-a7b6e88b6092-kube-api-access-k4xr5\") pod \"service-ca-operator-777779d784-xr447\" (UID: \"d274950d-dc6c-424c-b87e-a7b6e88b6092\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xr447" Nov 24 01:15:09 crc kubenswrapper[4755]: W1124 01:15:09.809111 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod65c5f11b_931e_4dc2_8c3e_c7180b94ec08.slice/crio-9ddbc9c26d2db07f6aeac1a684db3dad2c1fa644de7e15e20e9ee00bb7c79f40 WatchSource:0}: Error finding container 9ddbc9c26d2db07f6aeac1a684db3dad2c1fa644de7e15e20e9ee00bb7c79f40: Status 404 returned error can't find the container with id 9ddbc9c26d2db07f6aeac1a684db3dad2c1fa644de7e15e20e9ee00bb7c79f40 Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.811560 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.811664 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-xtpxd" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.818577 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6qvk7" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.831092 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7bwdn"] Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.831112 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.832759 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s445k" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.834200 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-2b47p"] Nov 24 01:15:09 crc kubenswrapper[4755]: W1124 01:15:09.843929 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2eb0eb3e_5d9d_421e_b33e_253f66f88ea9.slice/crio-7ab28ba51e27dfc3756edd5d2172088995f7e77071a08b4cf6ec511b5aad7e43 WatchSource:0}: Error finding container 7ab28ba51e27dfc3756edd5d2172088995f7e77071a08b4cf6ec511b5aad7e43: Status 404 returned error can't find the container with id 7ab28ba51e27dfc3756edd5d2172088995f7e77071a08b4cf6ec511b5aad7e43 Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.851894 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zjcfn" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.865713 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-xr447" Nov 24 01:15:09 crc kubenswrapper[4755]: W1124 01:15:09.874755 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0e541e12_dcdd_4753_9607_282590cbd898.slice/crio-1aadcd14c23bcd489101ded341e9162e5f46bb72e2a25073e5954eefd51cde53 WatchSource:0}: Error finding container 1aadcd14c23bcd489101ded341e9162e5f46bb72e2a25073e5954eefd51cde53: Status 404 returned error can't find the container with id 1aadcd14c23bcd489101ded341e9162e5f46bb72e2a25073e5954eefd51cde53 Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.914488 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/2ca81542-2eef-4099-92bd-301845e4d3c8-registry-tls\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.914583 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2ca81542-2eef-4099-92bd-301845e4d3c8-installation-pull-secrets\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.914665 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2sxqc\" (UniqueName: \"kubernetes.io/projected/d8dc4b77-8b76-4646-b593-dcc3c1b66403-kube-api-access-2sxqc\") pod \"catalog-operator-68c6474976-2v27w\" (UID: \"d8dc4b77-8b76-4646-b593-dcc3c1b66403\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2v27w" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.914761 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/67b21633-2061-4642-b7b0-e784b17bebfe-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-b7wq9\" (UID: \"67b21633-2061-4642-b7b0-e784b17bebfe\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b7wq9" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.915804 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/e9851e18-35a0-465e-9bdf-e180e4576679-srv-cert\") pod \"olm-operator-6b444d44fb-xtvfv\" (UID: \"e9851e18-35a0-465e-9bdf-e180e4576679\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-xtvfv" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.916106 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1aa96598-89c0-44a3-be99-13cdef9b84dc-bound-sa-token\") pod \"ingress-operator-5b745b69d9-trrhl\" (UID: \"1aa96598-89c0-44a3-be99-13cdef9b84dc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-trrhl" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.917947 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: 
\"kubernetes.io/empty-dir/ad829316-549b-4b0d-8e17-ded37a000f66-available-featuregates\") pod \"openshift-config-operator-7777fb866f-hqh6f\" (UID: \"ad829316-549b-4b0d-8e17-ded37a000f66\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hqh6f" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.918033 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wp94x\" (UniqueName: \"kubernetes.io/projected/e9851e18-35a0-465e-9bdf-e180e4576679-kube-api-access-wp94x\") pod \"olm-operator-6b444d44fb-xtvfv\" (UID: \"e9851e18-35a0-465e-9bdf-e180e4576679\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-xtvfv" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.918701 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5dfp\" (UniqueName: \"kubernetes.io/projected/67b21633-2061-4642-b7b0-e784b17bebfe-kube-api-access-j5dfp\") pod \"cluster-samples-operator-665b6dd947-b7wq9\" (UID: \"67b21633-2061-4642-b7b0-e784b17bebfe\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b7wq9" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.918732 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/d8dc4b77-8b76-4646-b593-dcc3c1b66403-srv-cert\") pod \"catalog-operator-68c6474976-2v27w\" (UID: \"d8dc4b77-8b76-4646-b593-dcc3c1b66403\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2v27w" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.918856 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:09 crc kubenswrapper[4755]: E1124 01:15:09.919764 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 01:15:10.419725156 +0000 UTC m=+135.105790657 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vxz4v" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.920981 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ad829316-549b-4b0d-8e17-ded37a000f66-serving-cert\") pod \"openshift-config-operator-7777fb866f-hqh6f\" (UID: \"ad829316-549b-4b0d-8e17-ded37a000f66\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hqh6f" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.921578 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/1aa96598-89c0-44a3-be99-13cdef9b84dc-metrics-tls\") pod \"ingress-operator-5b745b69d9-trrhl\" (UID: \"1aa96598-89c0-44a3-be99-13cdef9b84dc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-trrhl" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.921663 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2ca81542-2eef-4099-92bd-301845e4d3c8-trusted-ca\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.924901 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6rxk8\" (UniqueName: \"kubernetes.io/projected/d2019d3e-6460-4c69-b9cb-afdad2426d6a-kube-api-access-6rxk8\") pod \"console-operator-58897d9998-p4sx7\" (UID: \"d2019d3e-6460-4c69-b9cb-afdad2426d6a\") " pod="openshift-console-operator/console-operator-58897d9998-p4sx7" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.925030 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qsv6p\" (UniqueName: \"kubernetes.io/projected/1aa96598-89c0-44a3-be99-13cdef9b84dc-kube-api-access-qsv6p\") pod \"ingress-operator-5b745b69d9-trrhl\" (UID: \"1aa96598-89c0-44a3-be99-13cdef9b84dc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-trrhl" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.925077 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2019d3e-6460-4c69-b9cb-afdad2426d6a-config\") pod \"console-operator-58897d9998-p4sx7\" (UID: \"d2019d3e-6460-4c69-b9cb-afdad2426d6a\") " pod="openshift-console-operator/console-operator-58897d9998-p4sx7" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.925121 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/e9851e18-35a0-465e-9bdf-e180e4576679-profile-collector-cert\") pod \"olm-operator-6b444d44fb-xtvfv\" (UID: \"e9851e18-35a0-465e-9bdf-e180e4576679\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-xtvfv" Nov 24 01:15:09 crc 
kubenswrapper[4755]: I1124 01:15:09.925282 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1aa96598-89c0-44a3-be99-13cdef9b84dc-trusted-ca\") pod \"ingress-operator-5b745b69d9-trrhl\" (UID: \"1aa96598-89c0-44a3-be99-13cdef9b84dc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-trrhl" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.925316 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p68gd\" (UniqueName: \"kubernetes.io/projected/ad829316-549b-4b0d-8e17-ded37a000f66-kube-api-access-p68gd\") pod \"openshift-config-operator-7777fb866f-hqh6f\" (UID: \"ad829316-549b-4b0d-8e17-ded37a000f66\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hqh6f" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.925412 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2ca81542-2eef-4099-92bd-301845e4d3c8-ca-trust-extracted\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.925454 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d2019d3e-6460-4c69-b9cb-afdad2426d6a-trusted-ca\") pod \"console-operator-58897d9998-p4sx7\" (UID: \"d2019d3e-6460-4c69-b9cb-afdad2426d6a\") " pod="openshift-console-operator/console-operator-58897d9998-p4sx7" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.925486 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2ca81542-2eef-4099-92bd-301845e4d3c8-registry-certificates\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.925509 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rrd5w\" (UniqueName: \"kubernetes.io/projected/2ca81542-2eef-4099-92bd-301845e4d3c8-kube-api-access-rrd5w\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.925533 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d2019d3e-6460-4c69-b9cb-afdad2426d6a-serving-cert\") pod \"console-operator-58897d9998-p4sx7\" (UID: \"d2019d3e-6460-4c69-b9cb-afdad2426d6a\") " pod="openshift-console-operator/console-operator-58897d9998-p4sx7" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.925584 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2ca81542-2eef-4099-92bd-301845e4d3c8-bound-sa-token\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.925621 4755 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/d8dc4b77-8b76-4646-b593-dcc3c1b66403-profile-collector-cert\") pod \"catalog-operator-68c6474976-2v27w\" (UID: \"d8dc4b77-8b76-4646-b593-dcc3c1b66403\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2v27w" Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.975920 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6zk9r"] Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.988414 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-4gmv8"] Nov 24 01:15:09 crc kubenswrapper[4755]: I1124 01:15:09.988461 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-vnvq4"] Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.026415 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:15:10 crc kubenswrapper[4755]: E1124 01:15:10.026553 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:15:10.526537968 +0000 UTC m=+135.212603469 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.026753 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6rxk8\" (UniqueName: \"kubernetes.io/projected/d2019d3e-6460-4c69-b9cb-afdad2426d6a-kube-api-access-6rxk8\") pod \"console-operator-58897d9998-p4sx7\" (UID: \"d2019d3e-6460-4c69-b9cb-afdad2426d6a\") " pod="openshift-console-operator/console-operator-58897d9998-p4sx7" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.026829 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/85241300-e898-4330-8da8-577ddc45c251-apiservice-cert\") pod \"packageserver-d55dfcdfc-cw2tr\" (UID: \"85241300-e898-4330-8da8-577ddc45c251\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw2tr" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.026941 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b2977dd9-b5c2-40cd-bcd7-12f91d2edf31-metrics-certs\") pod \"router-default-5444994796-nfhp7\" (UID: \"b2977dd9-b5c2-40cd-bcd7-12f91d2edf31\") " pod="openshift-ingress/router-default-5444994796-nfhp7" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.026975 4755 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/c1c7d9e5-2214-41f9-ba56-7843592e8865-certs\") pod \"machine-config-server-l6tpb\" (UID: \"c1c7d9e5-2214-41f9-ba56-7843592e8865\") " pod="openshift-machine-config-operator/machine-config-server-l6tpb" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.026993 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qsv6p\" (UniqueName: \"kubernetes.io/projected/1aa96598-89c0-44a3-be99-13cdef9b84dc-kube-api-access-qsv6p\") pod \"ingress-operator-5b745b69d9-trrhl\" (UID: \"1aa96598-89c0-44a3-be99-13cdef9b84dc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-trrhl" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.027010 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ee68fc51-c244-479c-9c83-ed3b8137a2e6-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-gdtf7\" (UID: \"ee68fc51-c244-479c-9c83-ed3b8137a2e6\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-gdtf7" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.027069 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rx2cg\" (UniqueName: \"kubernetes.io/projected/e6623318-d2a9-4015-b310-96a7506f61f9-kube-api-access-rx2cg\") pod \"collect-profiles-29399115-rt4z5\" (UID: \"e6623318-d2a9-4015-b310-96a7506f61f9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399115-rt4z5" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.027120 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2019d3e-6460-4c69-b9cb-afdad2426d6a-config\") pod \"console-operator-58897d9998-p4sx7\" (UID: \"d2019d3e-6460-4c69-b9cb-afdad2426d6a\") " pod="openshift-console-operator/console-operator-58897d9998-p4sx7" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.027136 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lhsbf\" (UniqueName: \"kubernetes.io/projected/b2977dd9-b5c2-40cd-bcd7-12f91d2edf31-kube-api-access-lhsbf\") pod \"router-default-5444994796-nfhp7\" (UID: \"b2977dd9-b5c2-40cd-bcd7-12f91d2edf31\") " pod="openshift-ingress/router-default-5444994796-nfhp7" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.027152 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/e9851e18-35a0-465e-9bdf-e180e4576679-profile-collector-cert\") pod \"olm-operator-6b444d44fb-xtvfv\" (UID: \"e9851e18-35a0-465e-9bdf-e180e4576679\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-xtvfv" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.027216 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e6623318-d2a9-4015-b310-96a7506f61f9-secret-volume\") pod \"collect-profiles-29399115-rt4z5\" (UID: \"e6623318-d2a9-4015-b310-96a7506f61f9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399115-rt4z5" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.027233 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/8f4eb729-3801-47bc-a56f-e20d72aa89f8-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-4hrgd\" (UID: \"8f4eb729-3801-47bc-a56f-e20d72aa89f8\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4hrgd" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.027284 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/c1c7d9e5-2214-41f9-ba56-7843592e8865-node-bootstrap-token\") pod \"machine-config-server-l6tpb\" (UID: \"c1c7d9e5-2214-41f9-ba56-7843592e8865\") " pod="openshift-machine-config-operator/machine-config-server-l6tpb" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.027311 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1aa96598-89c0-44a3-be99-13cdef9b84dc-trusted-ca\") pod \"ingress-operator-5b745b69d9-trrhl\" (UID: \"1aa96598-89c0-44a3-be99-13cdef9b84dc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-trrhl" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.027328 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p68gd\" (UniqueName: \"kubernetes.io/projected/ad829316-549b-4b0d-8e17-ded37a000f66-kube-api-access-p68gd\") pod \"openshift-config-operator-7777fb866f-hqh6f\" (UID: \"ad829316-549b-4b0d-8e17-ded37a000f66\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hqh6f" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.027365 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d8f7852c-96d0-405c-aa5d-f301690cc0c3-cert\") pod \"ingress-canary-mzvv7\" (UID: \"d8f7852c-96d0-405c-aa5d-f301690cc0c3\") " pod="openshift-ingress-canary/ingress-canary-mzvv7" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.027389 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee68fc51-c244-479c-9c83-ed3b8137a2e6-config\") pod \"kube-controller-manager-operator-78b949d7b-gdtf7\" (UID: \"ee68fc51-c244-479c-9c83-ed3b8137a2e6\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-gdtf7" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.027430 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zjkn9\" (UniqueName: \"kubernetes.io/projected/f1b73c0e-66fa-44dc-8349-59fd960d422a-kube-api-access-zjkn9\") pod \"dns-default-g92v4\" (UID: \"f1b73c0e-66fa-44dc-8349-59fd960d422a\") " pod="openshift-dns/dns-default-g92v4" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.027459 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ee68fc51-c244-479c-9c83-ed3b8137a2e6-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-gdtf7\" (UID: \"ee68fc51-c244-479c-9c83-ed3b8137a2e6\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-gdtf7" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.027475 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" 
(UniqueName: \"kubernetes.io/host-path/b0af14ce-aaea-427f-bf89-15c1f4090ee7-plugins-dir\") pod \"csi-hostpathplugin-mqx68\" (UID: \"b0af14ce-aaea-427f-bf89-15c1f4090ee7\") " pod="hostpath-provisioner/csi-hostpathplugin-mqx68" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.027492 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2ca81542-2eef-4099-92bd-301845e4d3c8-ca-trust-extracted\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.027510 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8fdm8\" (UniqueName: \"kubernetes.io/projected/3ffaa46d-e7a6-401c-abfc-5b1d8600de1c-kube-api-access-8fdm8\") pod \"multus-admission-controller-857f4d67dd-57p55\" (UID: \"3ffaa46d-e7a6-401c-abfc-5b1d8600de1c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-57p55" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.027524 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/b0af14ce-aaea-427f-bf89-15c1f4090ee7-socket-dir\") pod \"csi-hostpathplugin-mqx68\" (UID: \"b0af14ce-aaea-427f-bf89-15c1f4090ee7\") " pod="hostpath-provisioner/csi-hostpathplugin-mqx68" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.027552 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d2019d3e-6460-4c69-b9cb-afdad2426d6a-trusted-ca\") pod \"console-operator-58897d9998-p4sx7\" (UID: \"d2019d3e-6460-4c69-b9cb-afdad2426d6a\") " pod="openshift-console-operator/console-operator-58897d9998-p4sx7" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.027597 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rrd5w\" (UniqueName: \"kubernetes.io/projected/2ca81542-2eef-4099-92bd-301845e4d3c8-kube-api-access-rrd5w\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.027639 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/b0af14ce-aaea-427f-bf89-15c1f4090ee7-csi-data-dir\") pod \"csi-hostpathplugin-mqx68\" (UID: \"b0af14ce-aaea-427f-bf89-15c1f4090ee7\") " pod="hostpath-provisioner/csi-hostpathplugin-mqx68" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.027680 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2ca81542-2eef-4099-92bd-301845e4d3c8-registry-certificates\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.027697 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e633b0f5-f0c5-44ec-adfb-2d724924031a-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-7tgn4\" (UID: \"e633b0f5-f0c5-44ec-adfb-2d724924031a\") " 
pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7tgn4" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.027712 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b2977dd9-b5c2-40cd-bcd7-12f91d2edf31-service-ca-bundle\") pod \"router-default-5444994796-nfhp7\" (UID: \"b2977dd9-b5c2-40cd-bcd7-12f91d2edf31\") " pod="openshift-ingress/router-default-5444994796-nfhp7" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.027752 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d2019d3e-6460-4c69-b9cb-afdad2426d6a-serving-cert\") pod \"console-operator-58897d9998-p4sx7\" (UID: \"d2019d3e-6460-4c69-b9cb-afdad2426d6a\") " pod="openshift-console-operator/console-operator-58897d9998-p4sx7" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.027768 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/082646ba-8822-4ced-acfa-0e7ba97612a5-signing-cabundle\") pod \"service-ca-9c57cc56f-rn5m4\" (UID: \"082646ba-8822-4ced-acfa-0e7ba97612a5\") " pod="openshift-service-ca/service-ca-9c57cc56f-rn5m4" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.027783 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6vtcm\" (UniqueName: \"kubernetes.io/projected/b0af14ce-aaea-427f-bf89-15c1f4090ee7-kube-api-access-6vtcm\") pod \"csi-hostpathplugin-mqx68\" (UID: \"b0af14ce-aaea-427f-bf89-15c1f4090ee7\") " pod="hostpath-provisioner/csi-hostpathplugin-mqx68" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.027823 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p97tn\" (UniqueName: \"kubernetes.io/projected/85241300-e898-4330-8da8-577ddc45c251-kube-api-access-p97tn\") pod \"packageserver-d55dfcdfc-cw2tr\" (UID: \"85241300-e898-4330-8da8-577ddc45c251\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw2tr" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.028303 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/d8dc4b77-8b76-4646-b593-dcc3c1b66403-profile-collector-cert\") pod \"catalog-operator-68c6474976-2v27w\" (UID: \"d8dc4b77-8b76-4646-b593-dcc3c1b66403\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2v27w" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.028348 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2ca81542-2eef-4099-92bd-301845e4d3c8-bound-sa-token\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.028364 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n66cn\" (UniqueName: \"kubernetes.io/projected/8f4eb729-3801-47bc-a56f-e20d72aa89f8-kube-api-access-n66cn\") pod \"package-server-manager-789f6589d5-4hrgd\" (UID: \"8f4eb729-3801-47bc-a56f-e20d72aa89f8\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4hrgd" Nov 24 01:15:10 crc 
kubenswrapper[4755]: I1124 01:15:10.028381 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/85241300-e898-4330-8da8-577ddc45c251-webhook-cert\") pod \"packageserver-d55dfcdfc-cw2tr\" (UID: \"85241300-e898-4330-8da8-577ddc45c251\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw2tr" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.028422 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2c25c73b-9396-44da-8e6d-67af776335e5-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-rxc25\" (UID: \"2c25c73b-9396-44da-8e6d-67af776335e5\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rxc25" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.028438 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sdvcj\" (UniqueName: \"kubernetes.io/projected/7c537b75-83c8-4250-aae0-cacbdb94445f-kube-api-access-sdvcj\") pod \"marketplace-operator-79b997595-x78dk\" (UID: \"7c537b75-83c8-4250-aae0-cacbdb94445f\") " pod="openshift-marketplace/marketplace-operator-79b997595-x78dk" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.028453 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/b0af14ce-aaea-427f-bf89-15c1f4090ee7-mountpoint-dir\") pod \"csi-hostpathplugin-mqx68\" (UID: \"b0af14ce-aaea-427f-bf89-15c1f4090ee7\") " pod="hostpath-provisioner/csi-hostpathplugin-mqx68" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.028492 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/2ca81542-2eef-4099-92bd-301845e4d3c8-registry-tls\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.028510 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/b0af14ce-aaea-427f-bf89-15c1f4090ee7-registration-dir\") pod \"csi-hostpathplugin-mqx68\" (UID: \"b0af14ce-aaea-427f-bf89-15c1f4090ee7\") " pod="hostpath-provisioner/csi-hostpathplugin-mqx68" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.028531 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2ca81542-2eef-4099-92bd-301845e4d3c8-installation-pull-secrets\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.028578 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/f1b73c0e-66fa-44dc-8349-59fd960d422a-metrics-tls\") pod \"dns-default-g92v4\" (UID: \"f1b73c0e-66fa-44dc-8349-59fd960d422a\") " pod="openshift-dns/dns-default-g92v4" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.028596 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/14de58c0-137f-407e-af53-0ee2bdf5468b-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-h4hv2\" (UID: \"14de58c0-137f-407e-af53-0ee2bdf5468b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-h4hv2" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.028639 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/3ffaa46d-e7a6-401c-abfc-5b1d8600de1c-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-57p55\" (UID: \"3ffaa46d-e7a6-401c-abfc-5b1d8600de1c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-57p55" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.028660 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2sxqc\" (UniqueName: \"kubernetes.io/projected/d8dc4b77-8b76-4646-b593-dcc3c1b66403-kube-api-access-2sxqc\") pod \"catalog-operator-68c6474976-2v27w\" (UID: \"d8dc4b77-8b76-4646-b593-dcc3c1b66403\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2v27w" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.028754 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vv2c\" (UniqueName: \"kubernetes.io/projected/6e0b27d4-4d58-466c-9beb-2bb75b9513d5-kube-api-access-9vv2c\") pod \"migrator-59844c95c7-qtkb8\" (UID: \"6e0b27d4-4d58-466c-9beb-2bb75b9513d5\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qtkb8" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.029043 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/082646ba-8822-4ced-acfa-0e7ba97612a5-signing-key\") pod \"service-ca-9c57cc56f-rn5m4\" (UID: \"082646ba-8822-4ced-acfa-0e7ba97612a5\") " pod="openshift-service-ca/service-ca-9c57cc56f-rn5m4" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.029075 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/67b21633-2061-4642-b7b0-e784b17bebfe-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-b7wq9\" (UID: \"67b21633-2061-4642-b7b0-e784b17bebfe\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b7wq9" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.029116 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e633b0f5-f0c5-44ec-adfb-2d724924031a-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-7tgn4\" (UID: \"e633b0f5-f0c5-44ec-adfb-2d724924031a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7tgn4" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.029138 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/e9851e18-35a0-465e-9bdf-e180e4576679-srv-cert\") pod \"olm-operator-6b444d44fb-xtvfv\" (UID: \"e9851e18-35a0-465e-9bdf-e180e4576679\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-xtvfv" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.029158 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/e633b0f5-f0c5-44ec-adfb-2d724924031a-config\") pod \"kube-apiserver-operator-766d6c64bb-7tgn4\" (UID: \"e633b0f5-f0c5-44ec-adfb-2d724924031a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7tgn4" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.029207 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1aa96598-89c0-44a3-be99-13cdef9b84dc-bound-sa-token\") pod \"ingress-operator-5b745b69d9-trrhl\" (UID: \"1aa96598-89c0-44a3-be99-13cdef9b84dc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-trrhl" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.029226 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l69vz\" (UniqueName: \"kubernetes.io/projected/d8f7852c-96d0-405c-aa5d-f301690cc0c3-kube-api-access-l69vz\") pod \"ingress-canary-mzvv7\" (UID: \"d8f7852c-96d0-405c-aa5d-f301690cc0c3\") " pod="openshift-ingress-canary/ingress-canary-mzvv7" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.029244 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/b2977dd9-b5c2-40cd-bcd7-12f91d2edf31-stats-auth\") pod \"router-default-5444994796-nfhp7\" (UID: \"b2977dd9-b5c2-40cd-bcd7-12f91d2edf31\") " pod="openshift-ingress/router-default-5444994796-nfhp7" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.029283 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7c537b75-83c8-4250-aae0-cacbdb94445f-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-x78dk\" (UID: \"7c537b75-83c8-4250-aae0-cacbdb94445f\") " pod="openshift-marketplace/marketplace-operator-79b997595-x78dk" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.029298 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/85241300-e898-4330-8da8-577ddc45c251-tmpfs\") pod \"packageserver-d55dfcdfc-cw2tr\" (UID: \"85241300-e898-4330-8da8-577ddc45c251\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw2tr" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.029367 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/ad829316-549b-4b0d-8e17-ded37a000f66-available-featuregates\") pod \"openshift-config-operator-7777fb866f-hqh6f\" (UID: \"ad829316-549b-4b0d-8e17-ded37a000f66\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hqh6f" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.029385 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/7c537b75-83c8-4250-aae0-cacbdb94445f-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-x78dk\" (UID: \"7c537b75-83c8-4250-aae0-cacbdb94445f\") " pod="openshift-marketplace/marketplace-operator-79b997595-x78dk" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.029399 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: 
\"kubernetes.io/configmap/e6623318-d2a9-4015-b310-96a7506f61f9-config-volume\") pod \"collect-profiles-29399115-rt4z5\" (UID: \"e6623318-d2a9-4015-b310-96a7506f61f9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399115-rt4z5" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.029461 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wp94x\" (UniqueName: \"kubernetes.io/projected/e9851e18-35a0-465e-9bdf-e180e4576679-kube-api-access-wp94x\") pod \"olm-operator-6b444d44fb-xtvfv\" (UID: \"e9851e18-35a0-465e-9bdf-e180e4576679\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-xtvfv" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.029477 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lb8q5\" (UniqueName: \"kubernetes.io/projected/2c25c73b-9396-44da-8e6d-67af776335e5-kube-api-access-lb8q5\") pod \"machine-config-controller-84d6567774-rxc25\" (UID: \"2c25c73b-9396-44da-8e6d-67af776335e5\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rxc25" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.029591 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f1b73c0e-66fa-44dc-8349-59fd960d422a-config-volume\") pod \"dns-default-g92v4\" (UID: \"f1b73c0e-66fa-44dc-8349-59fd960d422a\") " pod="openshift-dns/dns-default-g92v4" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.029633 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/14de58c0-137f-407e-af53-0ee2bdf5468b-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-h4hv2\" (UID: \"14de58c0-137f-407e-af53-0ee2bdf5468b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-h4hv2" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.029661 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5dfp\" (UniqueName: \"kubernetes.io/projected/67b21633-2061-4642-b7b0-e784b17bebfe-kube-api-access-j5dfp\") pod \"cluster-samples-operator-665b6dd947-b7wq9\" (UID: \"67b21633-2061-4642-b7b0-e784b17bebfe\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b7wq9" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.029677 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/d8dc4b77-8b76-4646-b593-dcc3c1b66403-srv-cert\") pod \"catalog-operator-68c6474976-2v27w\" (UID: \"d8dc4b77-8b76-4646-b593-dcc3c1b66403\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2v27w" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.029933 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tpxq4\" (UniqueName: \"kubernetes.io/projected/082646ba-8822-4ced-acfa-0e7ba97612a5-kube-api-access-tpxq4\") pod \"service-ca-9c57cc56f-rn5m4\" (UID: \"082646ba-8822-4ced-acfa-0e7ba97612a5\") " pod="openshift-service-ca/service-ca-9c57cc56f-rn5m4" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.029959 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.030121 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ad829316-549b-4b0d-8e17-ded37a000f66-serving-cert\") pod \"openshift-config-operator-7777fb866f-hqh6f\" (UID: \"ad829316-549b-4b0d-8e17-ded37a000f66\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hqh6f" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.030442 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/1aa96598-89c0-44a3-be99-13cdef9b84dc-metrics-tls\") pod \"ingress-operator-5b745b69d9-trrhl\" (UID: \"1aa96598-89c0-44a3-be99-13cdef9b84dc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-trrhl" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.030463 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/b2977dd9-b5c2-40cd-bcd7-12f91d2edf31-default-certificate\") pod \"router-default-5444994796-nfhp7\" (UID: \"b2977dd9-b5c2-40cd-bcd7-12f91d2edf31\") " pod="openshift-ingress/router-default-5444994796-nfhp7" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.030503 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2ca81542-2eef-4099-92bd-301845e4d3c8-trusted-ca\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.030520 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gslft\" (UniqueName: \"kubernetes.io/projected/c1c7d9e5-2214-41f9-ba56-7843592e8865-kube-api-access-gslft\") pod \"machine-config-server-l6tpb\" (UID: \"c1c7d9e5-2214-41f9-ba56-7843592e8865\") " pod="openshift-machine-config-operator/machine-config-server-l6tpb" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.030537 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/2c25c73b-9396-44da-8e6d-67af776335e5-proxy-tls\") pod \"machine-config-controller-84d6567774-rxc25\" (UID: \"2c25c73b-9396-44da-8e6d-67af776335e5\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rxc25" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.030618 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wqwfz\" (UniqueName: \"kubernetes.io/projected/14de58c0-137f-407e-af53-0ee2bdf5468b-kube-api-access-wqwfz\") pod \"kube-storage-version-migrator-operator-b67b599dd-h4hv2\" (UID: \"14de58c0-137f-407e-af53-0ee2bdf5468b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-h4hv2" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.037727 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2019d3e-6460-4c69-b9cb-afdad2426d6a-config\") pod 
\"console-operator-58897d9998-p4sx7\" (UID: \"d2019d3e-6460-4c69-b9cb-afdad2426d6a\") " pod="openshift-console-operator/console-operator-58897d9998-p4sx7" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.039516 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-tvwrx" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.040107 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2ca81542-2eef-4099-92bd-301845e4d3c8-ca-trust-extracted\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.041658 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/ad829316-549b-4b0d-8e17-ded37a000f66-available-featuregates\") pod \"openshift-config-operator-7777fb866f-hqh6f\" (UID: \"ad829316-549b-4b0d-8e17-ded37a000f66\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hqh6f" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.044806 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1aa96598-89c0-44a3-be99-13cdef9b84dc-trusted-ca\") pod \"ingress-operator-5b745b69d9-trrhl\" (UID: \"1aa96598-89c0-44a3-be99-13cdef9b84dc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-trrhl" Nov 24 01:15:10 crc kubenswrapper[4755]: E1124 01:15:10.044998 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 01:15:10.544976351 +0000 UTC m=+135.231041852 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vxz4v" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.046693 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/d8dc4b77-8b76-4646-b593-dcc3c1b66403-profile-collector-cert\") pod \"catalog-operator-68c6474976-2v27w\" (UID: \"d8dc4b77-8b76-4646-b593-dcc3c1b66403\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2v27w" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.048498 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d2019d3e-6460-4c69-b9cb-afdad2426d6a-trusted-ca\") pod \"console-operator-58897d9998-p4sx7\" (UID: \"d2019d3e-6460-4c69-b9cb-afdad2426d6a\") " pod="openshift-console-operator/console-operator-58897d9998-p4sx7" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.048811 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/e9851e18-35a0-465e-9bdf-e180e4576679-profile-collector-cert\") pod \"olm-operator-6b444d44fb-xtvfv\" (UID: \"e9851e18-35a0-465e-9bdf-e180e4576679\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-xtvfv" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.048945 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2ca81542-2eef-4099-92bd-301845e4d3c8-trusted-ca\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.050686 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2ca81542-2eef-4099-92bd-301845e4d3c8-registry-certificates\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.055687 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/d8dc4b77-8b76-4646-b593-dcc3c1b66403-srv-cert\") pod \"catalog-operator-68c6474976-2v27w\" (UID: \"d8dc4b77-8b76-4646-b593-dcc3c1b66403\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2v27w" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.061469 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ad829316-549b-4b0d-8e17-ded37a000f66-serving-cert\") pod \"openshift-config-operator-7777fb866f-hqh6f\" (UID: \"ad829316-549b-4b0d-8e17-ded37a000f66\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hqh6f" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.070788 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: 
\"kubernetes.io/projected/2ca81542-2eef-4099-92bd-301845e4d3c8-registry-tls\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.073079 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2ca81542-2eef-4099-92bd-301845e4d3c8-installation-pull-secrets\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.074673 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/1aa96598-89c0-44a3-be99-13cdef9b84dc-metrics-tls\") pod \"ingress-operator-5b745b69d9-trrhl\" (UID: \"1aa96598-89c0-44a3-be99-13cdef9b84dc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-trrhl" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.074682 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/e9851e18-35a0-465e-9bdf-e180e4576679-srv-cert\") pod \"olm-operator-6b444d44fb-xtvfv\" (UID: \"e9851e18-35a0-465e-9bdf-e180e4576679\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-xtvfv" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.077101 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d2019d3e-6460-4c69-b9cb-afdad2426d6a-serving-cert\") pod \"console-operator-58897d9998-p4sx7\" (UID: \"d2019d3e-6460-4c69-b9cb-afdad2426d6a\") " pod="openshift-console-operator/console-operator-58897d9998-p4sx7" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.077108 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/67b21633-2061-4642-b7b0-e784b17bebfe-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-b7wq9\" (UID: \"67b21633-2061-4642-b7b0-e784b17bebfe\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b7wq9" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.078963 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-xtpxd"] Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.085835 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p68gd\" (UniqueName: \"kubernetes.io/projected/ad829316-549b-4b0d-8e17-ded37a000f66-kube-api-access-p68gd\") pod \"openshift-config-operator-7777fb866f-hqh6f\" (UID: \"ad829316-549b-4b0d-8e17-ded37a000f66\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-hqh6f" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.088649 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6rxk8\" (UniqueName: \"kubernetes.io/projected/d2019d3e-6460-4c69-b9cb-afdad2426d6a-kube-api-access-6rxk8\") pod \"console-operator-58897d9998-p4sx7\" (UID: \"d2019d3e-6460-4c69-b9cb-afdad2426d6a\") " pod="openshift-console-operator/console-operator-58897d9998-p4sx7" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.119493 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qsv6p\" (UniqueName: 
\"kubernetes.io/projected/1aa96598-89c0-44a3-be99-13cdef9b84dc-kube-api-access-qsv6p\") pod \"ingress-operator-5b745b69d9-trrhl\" (UID: \"1aa96598-89c0-44a3-be99-13cdef9b84dc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-trrhl" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.131114 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.131361 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/c1c7d9e5-2214-41f9-ba56-7843592e8865-node-bootstrap-token\") pod \"machine-config-server-l6tpb\" (UID: \"c1c7d9e5-2214-41f9-ba56-7843592e8865\") " pod="openshift-machine-config-operator/machine-config-server-l6tpb" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.131408 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d8f7852c-96d0-405c-aa5d-f301690cc0c3-cert\") pod \"ingress-canary-mzvv7\" (UID: \"d8f7852c-96d0-405c-aa5d-f301690cc0c3\") " pod="openshift-ingress-canary/ingress-canary-mzvv7" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.131433 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee68fc51-c244-479c-9c83-ed3b8137a2e6-config\") pod \"kube-controller-manager-operator-78b949d7b-gdtf7\" (UID: \"ee68fc51-c244-479c-9c83-ed3b8137a2e6\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-gdtf7" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.131457 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zjkn9\" (UniqueName: \"kubernetes.io/projected/f1b73c0e-66fa-44dc-8349-59fd960d422a-kube-api-access-zjkn9\") pod \"dns-default-g92v4\" (UID: \"f1b73c0e-66fa-44dc-8349-59fd960d422a\") " pod="openshift-dns/dns-default-g92v4" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.131483 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ee68fc51-c244-479c-9c83-ed3b8137a2e6-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-gdtf7\" (UID: \"ee68fc51-c244-479c-9c83-ed3b8137a2e6\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-gdtf7" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.131511 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/b0af14ce-aaea-427f-bf89-15c1f4090ee7-plugins-dir\") pod \"csi-hostpathplugin-mqx68\" (UID: \"b0af14ce-aaea-427f-bf89-15c1f4090ee7\") " pod="hostpath-provisioner/csi-hostpathplugin-mqx68" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.131540 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8fdm8\" (UniqueName: \"kubernetes.io/projected/3ffaa46d-e7a6-401c-abfc-5b1d8600de1c-kube-api-access-8fdm8\") pod \"multus-admission-controller-857f4d67dd-57p55\" (UID: \"3ffaa46d-e7a6-401c-abfc-5b1d8600de1c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-57p55" Nov 
24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.131562 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/b0af14ce-aaea-427f-bf89-15c1f4090ee7-socket-dir\") pod \"csi-hostpathplugin-mqx68\" (UID: \"b0af14ce-aaea-427f-bf89-15c1f4090ee7\") " pod="hostpath-provisioner/csi-hostpathplugin-mqx68" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.131626 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/b0af14ce-aaea-427f-bf89-15c1f4090ee7-csi-data-dir\") pod \"csi-hostpathplugin-mqx68\" (UID: \"b0af14ce-aaea-427f-bf89-15c1f4090ee7\") " pod="hostpath-provisioner/csi-hostpathplugin-mqx68" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.131651 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e633b0f5-f0c5-44ec-adfb-2d724924031a-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-7tgn4\" (UID: \"e633b0f5-f0c5-44ec-adfb-2d724924031a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7tgn4" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.131676 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b2977dd9-b5c2-40cd-bcd7-12f91d2edf31-service-ca-bundle\") pod \"router-default-5444994796-nfhp7\" (UID: \"b2977dd9-b5c2-40cd-bcd7-12f91d2edf31\") " pod="openshift-ingress/router-default-5444994796-nfhp7" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.131699 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/082646ba-8822-4ced-acfa-0e7ba97612a5-signing-cabundle\") pod \"service-ca-9c57cc56f-rn5m4\" (UID: \"082646ba-8822-4ced-acfa-0e7ba97612a5\") " pod="openshift-service-ca/service-ca-9c57cc56f-rn5m4" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.131725 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6vtcm\" (UniqueName: \"kubernetes.io/projected/b0af14ce-aaea-427f-bf89-15c1f4090ee7-kube-api-access-6vtcm\") pod \"csi-hostpathplugin-mqx68\" (UID: \"b0af14ce-aaea-427f-bf89-15c1f4090ee7\") " pod="hostpath-provisioner/csi-hostpathplugin-mqx68" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.131752 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p97tn\" (UniqueName: \"kubernetes.io/projected/85241300-e898-4330-8da8-577ddc45c251-kube-api-access-p97tn\") pod \"packageserver-d55dfcdfc-cw2tr\" (UID: \"85241300-e898-4330-8da8-577ddc45c251\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw2tr" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.131786 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n66cn\" (UniqueName: \"kubernetes.io/projected/8f4eb729-3801-47bc-a56f-e20d72aa89f8-kube-api-access-n66cn\") pod \"package-server-manager-789f6589d5-4hrgd\" (UID: \"8f4eb729-3801-47bc-a56f-e20d72aa89f8\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4hrgd" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.131810 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/85241300-e898-4330-8da8-577ddc45c251-webhook-cert\") pod 
\"packageserver-d55dfcdfc-cw2tr\" (UID: \"85241300-e898-4330-8da8-577ddc45c251\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw2tr" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.131834 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2c25c73b-9396-44da-8e6d-67af776335e5-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-rxc25\" (UID: \"2c25c73b-9396-44da-8e6d-67af776335e5\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rxc25" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.131861 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/b0af14ce-aaea-427f-bf89-15c1f4090ee7-mountpoint-dir\") pod \"csi-hostpathplugin-mqx68\" (UID: \"b0af14ce-aaea-427f-bf89-15c1f4090ee7\") " pod="hostpath-provisioner/csi-hostpathplugin-mqx68" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.131886 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sdvcj\" (UniqueName: \"kubernetes.io/projected/7c537b75-83c8-4250-aae0-cacbdb94445f-kube-api-access-sdvcj\") pod \"marketplace-operator-79b997595-x78dk\" (UID: \"7c537b75-83c8-4250-aae0-cacbdb94445f\") " pod="openshift-marketplace/marketplace-operator-79b997595-x78dk" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.131911 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/b0af14ce-aaea-427f-bf89-15c1f4090ee7-registration-dir\") pod \"csi-hostpathplugin-mqx68\" (UID: \"b0af14ce-aaea-427f-bf89-15c1f4090ee7\") " pod="hostpath-provisioner/csi-hostpathplugin-mqx68" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.131935 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/f1b73c0e-66fa-44dc-8349-59fd960d422a-metrics-tls\") pod \"dns-default-g92v4\" (UID: \"f1b73c0e-66fa-44dc-8349-59fd960d422a\") " pod="openshift-dns/dns-default-g92v4" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.131957 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/14de58c0-137f-407e-af53-0ee2bdf5468b-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-h4hv2\" (UID: \"14de58c0-137f-407e-af53-0ee2bdf5468b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-h4hv2" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.131978 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/3ffaa46d-e7a6-401c-abfc-5b1d8600de1c-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-57p55\" (UID: \"3ffaa46d-e7a6-401c-abfc-5b1d8600de1c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-57p55" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.132007 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vv2c\" (UniqueName: \"kubernetes.io/projected/6e0b27d4-4d58-466c-9beb-2bb75b9513d5-kube-api-access-9vv2c\") pod \"migrator-59844c95c7-qtkb8\" (UID: \"6e0b27d4-4d58-466c-9beb-2bb75b9513d5\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qtkb8" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 
01:15:10.132033 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/082646ba-8822-4ced-acfa-0e7ba97612a5-signing-key\") pod \"service-ca-9c57cc56f-rn5m4\" (UID: \"082646ba-8822-4ced-acfa-0e7ba97612a5\") " pod="openshift-service-ca/service-ca-9c57cc56f-rn5m4" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.132067 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e633b0f5-f0c5-44ec-adfb-2d724924031a-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-7tgn4\" (UID: \"e633b0f5-f0c5-44ec-adfb-2d724924031a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7tgn4" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.132091 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e633b0f5-f0c5-44ec-adfb-2d724924031a-config\") pod \"kube-apiserver-operator-766d6c64bb-7tgn4\" (UID: \"e633b0f5-f0c5-44ec-adfb-2d724924031a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7tgn4" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.132119 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l69vz\" (UniqueName: \"kubernetes.io/projected/d8f7852c-96d0-405c-aa5d-f301690cc0c3-kube-api-access-l69vz\") pod \"ingress-canary-mzvv7\" (UID: \"d8f7852c-96d0-405c-aa5d-f301690cc0c3\") " pod="openshift-ingress-canary/ingress-canary-mzvv7" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.132141 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/b2977dd9-b5c2-40cd-bcd7-12f91d2edf31-stats-auth\") pod \"router-default-5444994796-nfhp7\" (UID: \"b2977dd9-b5c2-40cd-bcd7-12f91d2edf31\") " pod="openshift-ingress/router-default-5444994796-nfhp7" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.132164 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/85241300-e898-4330-8da8-577ddc45c251-tmpfs\") pod \"packageserver-d55dfcdfc-cw2tr\" (UID: \"85241300-e898-4330-8da8-577ddc45c251\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw2tr" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.132185 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7c537b75-83c8-4250-aae0-cacbdb94445f-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-x78dk\" (UID: \"7c537b75-83c8-4250-aae0-cacbdb94445f\") " pod="openshift-marketplace/marketplace-operator-79b997595-x78dk" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.132222 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/7c537b75-83c8-4250-aae0-cacbdb94445f-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-x78dk\" (UID: \"7c537b75-83c8-4250-aae0-cacbdb94445f\") " pod="openshift-marketplace/marketplace-operator-79b997595-x78dk" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.132246 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e6623318-d2a9-4015-b310-96a7506f61f9-config-volume\") pod 
\"collect-profiles-29399115-rt4z5\" (UID: \"e6623318-d2a9-4015-b310-96a7506f61f9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399115-rt4z5" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.132277 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lb8q5\" (UniqueName: \"kubernetes.io/projected/2c25c73b-9396-44da-8e6d-67af776335e5-kube-api-access-lb8q5\") pod \"machine-config-controller-84d6567774-rxc25\" (UID: \"2c25c73b-9396-44da-8e6d-67af776335e5\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rxc25" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.132321 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f1b73c0e-66fa-44dc-8349-59fd960d422a-config-volume\") pod \"dns-default-g92v4\" (UID: \"f1b73c0e-66fa-44dc-8349-59fd960d422a\") " pod="openshift-dns/dns-default-g92v4" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.132341 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/14de58c0-137f-407e-af53-0ee2bdf5468b-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-h4hv2\" (UID: \"14de58c0-137f-407e-af53-0ee2bdf5468b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-h4hv2" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.132381 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tpxq4\" (UniqueName: \"kubernetes.io/projected/082646ba-8822-4ced-acfa-0e7ba97612a5-kube-api-access-tpxq4\") pod \"service-ca-9c57cc56f-rn5m4\" (UID: \"082646ba-8822-4ced-acfa-0e7ba97612a5\") " pod="openshift-service-ca/service-ca-9c57cc56f-rn5m4" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.132407 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/b2977dd9-b5c2-40cd-bcd7-12f91d2edf31-default-certificate\") pod \"router-default-5444994796-nfhp7\" (UID: \"b2977dd9-b5c2-40cd-bcd7-12f91d2edf31\") " pod="openshift-ingress/router-default-5444994796-nfhp7" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.132430 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gslft\" (UniqueName: \"kubernetes.io/projected/c1c7d9e5-2214-41f9-ba56-7843592e8865-kube-api-access-gslft\") pod \"machine-config-server-l6tpb\" (UID: \"c1c7d9e5-2214-41f9-ba56-7843592e8865\") " pod="openshift-machine-config-operator/machine-config-server-l6tpb" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.132452 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/2c25c73b-9396-44da-8e6d-67af776335e5-proxy-tls\") pod \"machine-config-controller-84d6567774-rxc25\" (UID: \"2c25c73b-9396-44da-8e6d-67af776335e5\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rxc25" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.132477 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wqwfz\" (UniqueName: \"kubernetes.io/projected/14de58c0-137f-407e-af53-0ee2bdf5468b-kube-api-access-wqwfz\") pod \"kube-storage-version-migrator-operator-b67b599dd-h4hv2\" (UID: \"14de58c0-137f-407e-af53-0ee2bdf5468b\") " 
pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-h4hv2" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.132514 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/85241300-e898-4330-8da8-577ddc45c251-apiservice-cert\") pod \"packageserver-d55dfcdfc-cw2tr\" (UID: \"85241300-e898-4330-8da8-577ddc45c251\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw2tr" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.132549 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b2977dd9-b5c2-40cd-bcd7-12f91d2edf31-metrics-certs\") pod \"router-default-5444994796-nfhp7\" (UID: \"b2977dd9-b5c2-40cd-bcd7-12f91d2edf31\") " pod="openshift-ingress/router-default-5444994796-nfhp7" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.132573 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/c1c7d9e5-2214-41f9-ba56-7843592e8865-certs\") pod \"machine-config-server-l6tpb\" (UID: \"c1c7d9e5-2214-41f9-ba56-7843592e8865\") " pod="openshift-machine-config-operator/machine-config-server-l6tpb" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.132598 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ee68fc51-c244-479c-9c83-ed3b8137a2e6-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-gdtf7\" (UID: \"ee68fc51-c244-479c-9c83-ed3b8137a2e6\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-gdtf7" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.132702 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rx2cg\" (UniqueName: \"kubernetes.io/projected/e6623318-d2a9-4015-b310-96a7506f61f9-kube-api-access-rx2cg\") pod \"collect-profiles-29399115-rt4z5\" (UID: \"e6623318-d2a9-4015-b310-96a7506f61f9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399115-rt4z5" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.132739 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lhsbf\" (UniqueName: \"kubernetes.io/projected/b2977dd9-b5c2-40cd-bcd7-12f91d2edf31-kube-api-access-lhsbf\") pod \"router-default-5444994796-nfhp7\" (UID: \"b2977dd9-b5c2-40cd-bcd7-12f91d2edf31\") " pod="openshift-ingress/router-default-5444994796-nfhp7" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.132767 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e6623318-d2a9-4015-b310-96a7506f61f9-secret-volume\") pod \"collect-profiles-29399115-rt4z5\" (UID: \"e6623318-d2a9-4015-b310-96a7506f61f9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399115-rt4z5" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.132791 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/8f4eb729-3801-47bc-a56f-e20d72aa89f8-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-4hrgd\" (UID: \"8f4eb729-3801-47bc-a56f-e20d72aa89f8\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4hrgd" Nov 24 01:15:10 crc 
kubenswrapper[4755]: E1124 01:15:10.133460 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:15:10.633439423 +0000 UTC m=+135.319504924 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.137255 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/b0af14ce-aaea-427f-bf89-15c1f4090ee7-mountpoint-dir\") pod \"csi-hostpathplugin-mqx68\" (UID: \"b0af14ce-aaea-427f-bf89-15c1f4090ee7\") " pod="hostpath-provisioner/csi-hostpathplugin-mqx68" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.137769 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/082646ba-8822-4ced-acfa-0e7ba97612a5-signing-cabundle\") pod \"service-ca-9c57cc56f-rn5m4\" (UID: \"082646ba-8822-4ced-acfa-0e7ba97612a5\") " pod="openshift-service-ca/service-ca-9c57cc56f-rn5m4" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.142075 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee68fc51-c244-479c-9c83-ed3b8137a2e6-config\") pod \"kube-controller-manager-operator-78b949d7b-gdtf7\" (UID: \"ee68fc51-c244-479c-9c83-ed3b8137a2e6\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-gdtf7" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.142373 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/b0af14ce-aaea-427f-bf89-15c1f4090ee7-plugins-dir\") pod \"csi-hostpathplugin-mqx68\" (UID: \"b0af14ce-aaea-427f-bf89-15c1f4090ee7\") " pod="hostpath-provisioner/csi-hostpathplugin-mqx68" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.143696 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/14de58c0-137f-407e-af53-0ee2bdf5468b-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-h4hv2\" (UID: \"14de58c0-137f-407e-af53-0ee2bdf5468b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-h4hv2" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.144580 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/b0af14ce-aaea-427f-bf89-15c1f4090ee7-registration-dir\") pod \"csi-hostpathplugin-mqx68\" (UID: \"b0af14ce-aaea-427f-bf89-15c1f4090ee7\") " pod="hostpath-provisioner/csi-hostpathplugin-mqx68" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.145302 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/b0af14ce-aaea-427f-bf89-15c1f4090ee7-csi-data-dir\") pod \"csi-hostpathplugin-mqx68\" (UID: 
\"b0af14ce-aaea-427f-bf89-15c1f4090ee7\") " pod="hostpath-provisioner/csi-hostpathplugin-mqx68" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.145382 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/b0af14ce-aaea-427f-bf89-15c1f4090ee7-socket-dir\") pod \"csi-hostpathplugin-mqx68\" (UID: \"b0af14ce-aaea-427f-bf89-15c1f4090ee7\") " pod="hostpath-provisioner/csi-hostpathplugin-mqx68" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.148461 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rrd5w\" (UniqueName: \"kubernetes.io/projected/2ca81542-2eef-4099-92bd-301845e4d3c8-kube-api-access-rrd5w\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.149283 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b2977dd9-b5c2-40cd-bcd7-12f91d2edf31-service-ca-bundle\") pod \"router-default-5444994796-nfhp7\" (UID: \"b2977dd9-b5c2-40cd-bcd7-12f91d2edf31\") " pod="openshift-ingress/router-default-5444994796-nfhp7" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.149311 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2c25c73b-9396-44da-8e6d-67af776335e5-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-rxc25\" (UID: \"2c25c73b-9396-44da-8e6d-67af776335e5\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rxc25" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.149710 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/85241300-e898-4330-8da8-577ddc45c251-tmpfs\") pod \"packageserver-d55dfcdfc-cw2tr\" (UID: \"85241300-e898-4330-8da8-577ddc45c251\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw2tr" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.150746 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/c1c7d9e5-2214-41f9-ba56-7843592e8865-node-bootstrap-token\") pod \"machine-config-server-l6tpb\" (UID: \"c1c7d9e5-2214-41f9-ba56-7843592e8865\") " pod="openshift-machine-config-operator/machine-config-server-l6tpb" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.152175 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e633b0f5-f0c5-44ec-adfb-2d724924031a-config\") pod \"kube-apiserver-operator-766d6c64bb-7tgn4\" (UID: \"e633b0f5-f0c5-44ec-adfb-2d724924031a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7tgn4" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.153616 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2sxqc\" (UniqueName: \"kubernetes.io/projected/d8dc4b77-8b76-4646-b593-dcc3c1b66403-kube-api-access-2sxqc\") pod \"catalog-operator-68c6474976-2v27w\" (UID: \"d8dc4b77-8b76-4646-b593-dcc3c1b66403\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2v27w" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.161182 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: 
\"kubernetes.io/secret/f1b73c0e-66fa-44dc-8349-59fd960d422a-metrics-tls\") pod \"dns-default-g92v4\" (UID: \"f1b73c0e-66fa-44dc-8349-59fd960d422a\") " pod="openshift-dns/dns-default-g92v4" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.161648 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e6623318-d2a9-4015-b310-96a7506f61f9-config-volume\") pod \"collect-profiles-29399115-rt4z5\" (UID: \"e6623318-d2a9-4015-b310-96a7506f61f9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399115-rt4z5" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.161911 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/082646ba-8822-4ced-acfa-0e7ba97612a5-signing-key\") pod \"service-ca-9c57cc56f-rn5m4\" (UID: \"082646ba-8822-4ced-acfa-0e7ba97612a5\") " pod="openshift-service-ca/service-ca-9c57cc56f-rn5m4" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.161997 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7c537b75-83c8-4250-aae0-cacbdb94445f-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-x78dk\" (UID: \"7c537b75-83c8-4250-aae0-cacbdb94445f\") " pod="openshift-marketplace/marketplace-operator-79b997595-x78dk" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.162087 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f1b73c0e-66fa-44dc-8349-59fd960d422a-config-volume\") pod \"dns-default-g92v4\" (UID: \"f1b73c0e-66fa-44dc-8349-59fd960d422a\") " pod="openshift-dns/dns-default-g92v4" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.164487 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/2c25c73b-9396-44da-8e6d-67af776335e5-proxy-tls\") pod \"machine-config-controller-84d6567774-rxc25\" (UID: \"2c25c73b-9396-44da-8e6d-67af776335e5\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rxc25" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.165132 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d8f7852c-96d0-405c-aa5d-f301690cc0c3-cert\") pod \"ingress-canary-mzvv7\" (UID: \"d8f7852c-96d0-405c-aa5d-f301690cc0c3\") " pod="openshift-ingress-canary/ingress-canary-mzvv7" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.166001 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/3ffaa46d-e7a6-401c-abfc-5b1d8600de1c-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-57p55\" (UID: \"3ffaa46d-e7a6-401c-abfc-5b1d8600de1c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-57p55" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.166107 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/14de58c0-137f-407e-af53-0ee2bdf5468b-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-h4hv2\" (UID: \"14de58c0-137f-407e-af53-0ee2bdf5468b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-h4hv2" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.166177 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/b2977dd9-b5c2-40cd-bcd7-12f91d2edf31-default-certificate\") pod \"router-default-5444994796-nfhp7\" (UID: \"b2977dd9-b5c2-40cd-bcd7-12f91d2edf31\") " pod="openshift-ingress/router-default-5444994796-nfhp7" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.166187 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/85241300-e898-4330-8da8-577ddc45c251-apiservice-cert\") pod \"packageserver-d55dfcdfc-cw2tr\" (UID: \"85241300-e898-4330-8da8-577ddc45c251\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw2tr" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.166255 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/8f4eb729-3801-47bc-a56f-e20d72aa89f8-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-4hrgd\" (UID: \"8f4eb729-3801-47bc-a56f-e20d72aa89f8\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4hrgd" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.166416 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/b2977dd9-b5c2-40cd-bcd7-12f91d2edf31-stats-auth\") pod \"router-default-5444994796-nfhp7\" (UID: \"b2977dd9-b5c2-40cd-bcd7-12f91d2edf31\") " pod="openshift-ingress/router-default-5444994796-nfhp7" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.166568 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/85241300-e898-4330-8da8-577ddc45c251-webhook-cert\") pod \"packageserver-d55dfcdfc-cw2tr\" (UID: \"85241300-e898-4330-8da8-577ddc45c251\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw2tr" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.167239 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e633b0f5-f0c5-44ec-adfb-2d724924031a-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-7tgn4\" (UID: \"e633b0f5-f0c5-44ec-adfb-2d724924031a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7tgn4" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.168876 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/c1c7d9e5-2214-41f9-ba56-7843592e8865-certs\") pod \"machine-config-server-l6tpb\" (UID: \"c1c7d9e5-2214-41f9-ba56-7843592e8865\") " pod="openshift-machine-config-operator/machine-config-server-l6tpb" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.175784 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e6623318-d2a9-4015-b310-96a7506f61f9-secret-volume\") pod \"collect-profiles-29399115-rt4z5\" (UID: \"e6623318-d2a9-4015-b310-96a7506f61f9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399115-rt4z5" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.184038 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wp94x\" (UniqueName: \"kubernetes.io/projected/e9851e18-35a0-465e-9bdf-e180e4576679-kube-api-access-wp94x\") pod \"olm-operator-6b444d44fb-xtvfv\" (UID: \"e9851e18-35a0-465e-9bdf-e180e4576679\") " 
pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-xtvfv" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.184842 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/7c537b75-83c8-4250-aae0-cacbdb94445f-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-x78dk\" (UID: \"7c537b75-83c8-4250-aae0-cacbdb94445f\") " pod="openshift-marketplace/marketplace-operator-79b997595-x78dk" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.185652 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b2977dd9-b5c2-40cd-bcd7-12f91d2edf31-metrics-certs\") pod \"router-default-5444994796-nfhp7\" (UID: \"b2977dd9-b5c2-40cd-bcd7-12f91d2edf31\") " pod="openshift-ingress/router-default-5444994796-nfhp7" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.185699 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ee68fc51-c244-479c-9c83-ed3b8137a2e6-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-gdtf7\" (UID: \"ee68fc51-c244-479c-9c83-ed3b8137a2e6\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-gdtf7" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.197157 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2ca81542-2eef-4099-92bd-301845e4d3c8-bound-sa-token\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.240575 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:10 crc kubenswrapper[4755]: E1124 01:15:10.241208 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 01:15:10.741194043 +0000 UTC m=+135.427259544 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vxz4v" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.248557 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1aa96598-89c0-44a3-be99-13cdef9b84dc-bound-sa-token\") pod \"ingress-operator-5b745b69d9-trrhl\" (UID: \"1aa96598-89c0-44a3-be99-13cdef9b84dc\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-trrhl" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.255470 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j5dfp\" (UniqueName: \"kubernetes.io/projected/67b21633-2061-4642-b7b0-e784b17bebfe-kube-api-access-j5dfp\") pod \"cluster-samples-operator-665b6dd947-b7wq9\" (UID: \"67b21633-2061-4642-b7b0-e784b17bebfe\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b7wq9" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.264304 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9vv2c\" (UniqueName: \"kubernetes.io/projected/6e0b27d4-4d58-466c-9beb-2bb75b9513d5-kube-api-access-9vv2c\") pod \"migrator-59844c95c7-qtkb8\" (UID: \"6e0b27d4-4d58-466c-9beb-2bb75b9513d5\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qtkb8" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.285726 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6vtcm\" (UniqueName: \"kubernetes.io/projected/b0af14ce-aaea-427f-bf89-15c1f4090ee7-kube-api-access-6vtcm\") pod \"csi-hostpathplugin-mqx68\" (UID: \"b0af14ce-aaea-427f-bf89-15c1f4090ee7\") " pod="hostpath-provisioner/csi-hostpathplugin-mqx68" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.295905 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-mqx68" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.309726 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p97tn\" (UniqueName: \"kubernetes.io/projected/85241300-e898-4330-8da8-577ddc45c251-kube-api-access-p97tn\") pod \"packageserver-d55dfcdfc-cw2tr\" (UID: \"85241300-e898-4330-8da8-577ddc45c251\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw2tr" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.334956 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n66cn\" (UniqueName: \"kubernetes.io/projected/8f4eb729-3801-47bc-a56f-e20d72aa89f8-kube-api-access-n66cn\") pod \"package-server-manager-789f6589d5-4hrgd\" (UID: \"8f4eb729-3801-47bc-a56f-e20d72aa89f8\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4hrgd" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.343122 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:15:10 crc kubenswrapper[4755]: E1124 01:15:10.343293 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:15:10.843272691 +0000 UTC m=+135.529338192 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.343858 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:10 crc kubenswrapper[4755]: E1124 01:15:10.344243 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 01:15:10.84422937 +0000 UTC m=+135.530294871 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vxz4v" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.350248 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8fdm8\" (UniqueName: \"kubernetes.io/projected/3ffaa46d-e7a6-401c-abfc-5b1d8600de1c-kube-api-access-8fdm8\") pod \"multus-admission-controller-857f4d67dd-57p55\" (UID: \"3ffaa46d-e7a6-401c-abfc-5b1d8600de1c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-57p55" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.355728 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b7wq9" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.364561 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hqh6f" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.368467 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-p4sx7" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.371094 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zjkn9\" (UniqueName: \"kubernetes.io/projected/f1b73c0e-66fa-44dc-8349-59fd960d422a-kube-api-access-zjkn9\") pod \"dns-default-g92v4\" (UID: \"f1b73c0e-66fa-44dc-8349-59fd960d422a\") " pod="openshift-dns/dns-default-g92v4" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.396543 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ee68fc51-c244-479c-9c83-ed3b8137a2e6-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-gdtf7\" (UID: \"ee68fc51-c244-479c-9c83-ed3b8137a2e6\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-gdtf7" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.398813 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s445k"] Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.402955 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-zjcfn"] Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.411912 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sdvcj\" (UniqueName: \"kubernetes.io/projected/7c537b75-83c8-4250-aae0-cacbdb94445f-kube-api-access-sdvcj\") pod \"marketplace-operator-79b997595-x78dk\" (UID: \"7c537b75-83c8-4250-aae0-cacbdb94445f\") " pod="openshift-marketplace/marketplace-operator-79b997595-x78dk" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.425757 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2v27w" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.431081 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gslft\" (UniqueName: \"kubernetes.io/projected/c1c7d9e5-2214-41f9-ba56-7843592e8865-kube-api-access-gslft\") pod \"machine-config-server-l6tpb\" (UID: \"c1c7d9e5-2214-41f9-ba56-7843592e8865\") " pod="openshift-machine-config-operator/machine-config-server-l6tpb" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.444205 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6qvk7"] Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.444827 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.445343 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-trrhl" Nov 24 01:15:10 crc kubenswrapper[4755]: E1124 01:15:10.445520 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:15:10.945499313 +0000 UTC m=+135.631564814 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.445518 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-snkpv"] Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.448548 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-cslz2"] Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.457051 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-xtvfv" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.465827 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tpxq4\" (UniqueName: \"kubernetes.io/projected/082646ba-8822-4ced-acfa-0e7ba97612a5-kube-api-access-tpxq4\") pod \"service-ca-9c57cc56f-rn5m4\" (UID: \"082646ba-8822-4ced-acfa-0e7ba97612a5\") " pod="openshift-service-ca/service-ca-9c57cc56f-rn5m4" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.471009 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wqwfz\" (UniqueName: \"kubernetes.io/projected/14de58c0-137f-407e-af53-0ee2bdf5468b-kube-api-access-wqwfz\") pod \"kube-storage-version-migrator-operator-b67b599dd-h4hv2\" (UID: \"14de58c0-137f-407e-af53-0ee2bdf5468b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-h4hv2" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.474325 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-gdtf7" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.483698 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qtkb8" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.493629 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4hrgd" Nov 24 01:15:10 crc kubenswrapper[4755]: W1124 01:15:10.497923 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfa143983_92e8_480e_9bb3_928892077000.slice/crio-9f99990879409e81d77b48e34128c17c2046282b21b1ac74e5bcd184cac478aa WatchSource:0}: Error finding container 9f99990879409e81d77b48e34128c17c2046282b21b1ac74e5bcd184cac478aa: Status 404 returned error can't find the container with id 9f99990879409e81d77b48e34128c17c2046282b21b1ac74e5bcd184cac478aa Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.498585 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l69vz\" (UniqueName: \"kubernetes.io/projected/d8f7852c-96d0-405c-aa5d-f301690cc0c3-kube-api-access-l69vz\") pod \"ingress-canary-mzvv7\" (UID: \"d8f7852c-96d0-405c-aa5d-f301690cc0c3\") " pod="openshift-ingress-canary/ingress-canary-mzvv7" Nov 24 01:15:10 crc kubenswrapper[4755]: W1124 01:15:10.505751 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod47cac8d4_7293_45b3_9142_7717f79f973b.slice/crio-84143d6b1a966a81a9dd5dab5d5eb1111c608e6c1bf3bfa231ef5e5dd9ea01f2 WatchSource:0}: Error finding container 84143d6b1a966a81a9dd5dab5d5eb1111c608e6c1bf3bfa231ef5e5dd9ea01f2: Status 404 returned error can't find the container with id 84143d6b1a966a81a9dd5dab5d5eb1111c608e6c1bf3bfa231ef5e5dd9ea01f2 Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.508148 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-x78dk" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.508731 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rx2cg\" (UniqueName: \"kubernetes.io/projected/e6623318-d2a9-4015-b310-96a7506f61f9-kube-api-access-rx2cg\") pod \"collect-profiles-29399115-rt4z5\" (UID: \"e6623318-d2a9-4015-b310-96a7506f61f9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399115-rt4z5" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.516990 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw2tr" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.533710 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lhsbf\" (UniqueName: \"kubernetes.io/projected/b2977dd9-b5c2-40cd-bcd7-12f91d2edf31-kube-api-access-lhsbf\") pod \"router-default-5444994796-nfhp7\" (UID: \"b2977dd9-b5c2-40cd-bcd7-12f91d2edf31\") " pod="openshift-ingress/router-default-5444994796-nfhp7" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.533902 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-nfhp7" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.539713 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-h4hv2" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.543996 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-mqx68"] Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.547317 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-rn5m4" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.548625 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:10 crc kubenswrapper[4755]: E1124 01:15:10.549489 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 01:15:11.049471408 +0000 UTC m=+135.735536909 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vxz4v" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.551721 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e633b0f5-f0c5-44ec-adfb-2d724924031a-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-7tgn4\" (UID: \"e633b0f5-f0c5-44ec-adfb-2d724924031a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7tgn4" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.559030 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29399115-rt4z5" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.562493 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-57p55" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.570712 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-mzvv7" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.571383 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lb8q5\" (UniqueName: \"kubernetes.io/projected/2c25c73b-9396-44da-8e6d-67af776335e5-kube-api-access-lb8q5\") pod \"machine-config-controller-84d6567774-rxc25\" (UID: \"2c25c73b-9396-44da-8e6d-67af776335e5\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rxc25" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.578372 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-l6tpb" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.602142 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-g92v4" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.651353 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:15:10 crc kubenswrapper[4755]: E1124 01:15:10.651553 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:15:11.151523994 +0000 UTC m=+135.837589515 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.652222 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:10 crc kubenswrapper[4755]: E1124 01:15:10.652787 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 01:15:11.152753072 +0000 UTC m=+135.838818573 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vxz4v" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.660743 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-xr447"] Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.683500 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b7wq9"] Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.711704 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-tvwrx"] Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.735511 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-mqx68" event={"ID":"b0af14ce-aaea-427f-bf89-15c1f4090ee7","Type":"ContainerStarted","Data":"7b8dd0edbeb95069a6ad8afbb8a4b352c65f7fa153ff27a904acff03ae01106c"} Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.740333 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-2b47p" event={"ID":"0e541e12-dcdd-4753-9607-282590cbd898","Type":"ContainerStarted","Data":"2d61adf9c24fdbe051cc039fdca3c0e2cd25826b2b523c95e91064a444fafbfa"} Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.740385 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-2b47p" event={"ID":"0e541e12-dcdd-4753-9607-282590cbd898","Type":"ContainerStarted","Data":"1aadcd14c23bcd489101ded341e9162e5f46bb72e2a25073e5954eefd51cde53"} Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.741131 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-p4sx7"] Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.753753 4755 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:15:10 crc kubenswrapper[4755]: E1124 01:15:10.754132 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:15:11.254117288 +0000 UTC m=+135.940182789 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.759538 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2v27w"] Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.772733 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-xtpxd" event={"ID":"c55b3456-832f-47a0-90fc-383747422868","Type":"ContainerStarted","Data":"e179efe65cb9dcde51b6ec346096dfe20bd6f522f632f4ab9682775339193f1f"} Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.773056 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-xtpxd" event={"ID":"c55b3456-832f-47a0-90fc-383747422868","Type":"ContainerStarted","Data":"110cf16851c11ec0896caf4d366ba21623f293a0546adfe11f353f04c9c704d9"} Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.782147 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-snkpv" event={"ID":"47cac8d4-7293-45b3-9142-7717f79f973b","Type":"ContainerStarted","Data":"84143d6b1a966a81a9dd5dab5d5eb1111c608e6c1bf3bfa231ef5e5dd9ea01f2"} Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.787148 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rxc25" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.787525 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s445k" event={"ID":"f9511d1a-d4c3-48e5-add0-a09b3543f768","Type":"ContainerStarted","Data":"ec464231e26f2f4840036f61522935fb94ca49246cf168286a3ecdd53727e8b9"} Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.794954 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cnqln" event={"ID":"1d3d34ab-42fa-4eaf-98fc-247a14f1231e","Type":"ContainerStarted","Data":"77339f5ddc21498419e19836b95ca278c3e444b734cfff8799962756f8d0cffe"} Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.794984 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cnqln" event={"ID":"1d3d34ab-42fa-4eaf-98fc-247a14f1231e","Type":"ContainerStarted","Data":"9290be86ce5809424fb894609372fbb54449149624792ae158b302a7d2af25b1"} Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.796976 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7bwdn" event={"ID":"2eb0eb3e-5d9d-421e-b33e-253f66f88ea9","Type":"ContainerStarted","Data":"698661027e1b6c2dcb3b2643d7ad17aa0233c444955f82f0c32752c7d7512ec3"} Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.797005 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7bwdn" event={"ID":"2eb0eb3e-5d9d-421e-b33e-253f66f88ea9","Type":"ContainerStarted","Data":"7ab28ba51e27dfc3756edd5d2172088995f7e77071a08b4cf6ec511b5aad7e43"} Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.800243 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-2qz86" event={"ID":"1f1350c1-7f2f-4c43-9029-a33ab1eb24a8","Type":"ContainerStarted","Data":"7d00c25c1095cb12f7a49b857e83d14eb540b0633b4878a1705c9a295f39dd99"} Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.800272 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-2qz86" event={"ID":"1f1350c1-7f2f-4c43-9029-a33ab1eb24a8","Type":"ContainerStarted","Data":"5bea381c248e511e39c6c0529bbbc38a26970d8a045ed4106626230c7e561b0d"} Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.800665 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-2qz86" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.801537 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cslz2" event={"ID":"3de6425b-7697-42d0-8b32-0a6e91078e9f","Type":"ContainerStarted","Data":"d2404787264bd1afe1dcac498bbc21ac6db93c938b012de7a7293d8e37ccb9e3"} Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.806696 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6zk9r" event={"ID":"644dc770-f6b6-4300-a800-642311c680bf","Type":"ContainerStarted","Data":"18f65ba4b2d91b752813b34c84f2facc9783a572d50af255b5b137f1ae09fd6f"} Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.806726 4755 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6zk9r" event={"ID":"644dc770-f6b6-4300-a800-642311c680bf","Type":"ContainerStarted","Data":"2b5dc08db73867deca0f550ee59adcd5ad88a24ae7b51821b12d90d844636ec2"} Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.808429 4755 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-2qz86 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.808476 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-2qz86" podUID="1f1350c1-7f2f-4c43-9029-a33ab1eb24a8" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.809562 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-vnvq4" event={"ID":"d55c6dc8-108f-4f8c-a6cf-4c7e1363c9e4","Type":"ContainerStarted","Data":"69285e0cbb9c40ec6590cf6bc70fa489f9778a4ef8b49f01ab92140f6dc37be4"} Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.809594 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-vnvq4" event={"ID":"d55c6dc8-108f-4f8c-a6cf-4c7e1363c9e4","Type":"ContainerStarted","Data":"83edf1bc04e0a870a20d9471593d3c1555e2961d859c0ec7c23ad83fe6a23b58"} Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.809717 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-vnvq4" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.811916 4755 generic.go:334] "Generic (PLEG): container finished" podID="06b2b494-6e14-4fd2-8fc8-5b491090adaa" containerID="f6b5f6168039b7007fa3e1fa2dd72f7e80c742b5e1e5cdfd5baed1bb93cadd54" exitCode=0 Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.811970 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-vmwlz" event={"ID":"06b2b494-6e14-4fd2-8fc8-5b491090adaa","Type":"ContainerDied","Data":"f6b5f6168039b7007fa3e1fa2dd72f7e80c742b5e1e5cdfd5baed1bb93cadd54"} Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.811991 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-vmwlz" event={"ID":"06b2b494-6e14-4fd2-8fc8-5b491090adaa","Type":"ContainerStarted","Data":"1a4b917b1f2b4a6cbf9ca3be656100dea2330c7c46d0d1e84c1dbff09fdb87b6"} Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.813285 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-mlls8" event={"ID":"65c5f11b-931e-4dc2-8c3e-c7180b94ec08","Type":"ContainerStarted","Data":"98395b84a8c25e568091cf520ac60a00f8a2a7c9c68bba5866b5c99e92cd6edb"} Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.813319 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-mlls8" event={"ID":"65c5f11b-931e-4dc2-8c3e-c7180b94ec08","Type":"ContainerStarted","Data":"9ddbc9c26d2db07f6aeac1a684db3dad2c1fa644de7e15e20e9ee00bb7c79f40"} Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.821620 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-9f7kc" 
event={"ID":"fceb51f9-deec-4840-86d0-a67228819bef","Type":"ContainerStarted","Data":"2cbf833a1f247db8a27ee66a663d8b0ac8b4e8e9f720d6b705a5d834f896ea09"} Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.821652 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-9f7kc" event={"ID":"fceb51f9-deec-4840-86d0-a67228819bef","Type":"ContainerStarted","Data":"353a2bba47e9f98f0b4ca26401d8529e5f98cc2ebde68f1ae7e0814fcc9363a0"} Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.823119 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7tgn4" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.823402 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6qvk7" event={"ID":"fa143983-92e8-480e-9bb3-928892077000","Type":"ContainerStarted","Data":"9f99990879409e81d77b48e34128c17c2046282b21b1ac74e5bcd184cac478aa"} Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.827087 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4gmv8" event={"ID":"85fb9244-9754-4924-b53c-51ccbf6a5220","Type":"ContainerStarted","Data":"1edaa6e0504d8818e4a5ab2990280a35c8a4a20408a381ceb10c5909fd536f25"} Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.827126 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4gmv8" event={"ID":"85fb9244-9754-4924-b53c-51ccbf6a5220","Type":"ContainerStarted","Data":"75ba5053a2b44c334ec6cc7cec9061199ce5e57a8486fc662bbf9272a09749a0"} Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.827294 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4gmv8" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.830253 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" event={"ID":"2b4041a5-964c-4bd3-8723-a45e5d6ca9be","Type":"ContainerStarted","Data":"bfe292cd297c78c786dca981e700c4e7e272ea7c7abcd06fe92e3526f3f5a731"} Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.830285 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" event={"ID":"2b4041a5-964c-4bd3-8723-a45e5d6ca9be","Type":"ContainerStarted","Data":"5d9e7fac9e75cc784bcb96cd3bb86cd8cdc1d9cd47d3ac30a8a99c676add9262"} Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.830366 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.835012 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zjcfn" event={"ID":"77b33d59-46dc-45ae-9ded-cbd3918f79fd","Type":"ContainerStarted","Data":"3a9d3b674ed43e78c1ff3afa76a75332da3ac6282231d0ab441506dd0129f4b0"} Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.861464 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: 
\"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:10 crc kubenswrapper[4755]: E1124 01:15:10.864114 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 01:15:11.362548269 +0000 UTC m=+136.048613860 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vxz4v" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.867873 4755 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-pnvtd container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.6:6443/healthz\": dial tcp 10.217.0.6:6443: connect: connection refused" start-of-body= Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.867919 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" podUID="2b4041a5-964c-4bd3-8723-a45e5d6ca9be" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.6:6443/healthz\": dial tcp 10.217.0.6:6443: connect: connection refused" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.867949 4755 patch_prober.go:28] interesting pod/downloads-7954f5f757-vnvq4 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" start-of-body= Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.867995 4755 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-4gmv8 container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused" start-of-body= Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.868005 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-vnvq4" podUID="d55c6dc8-108f-4f8c-a6cf-4c7e1363c9e4" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.868061 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4gmv8" podUID="85fb9244-9754-4924-b53c-51ccbf6a5220" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused" Nov 24 01:15:10 crc kubenswrapper[4755]: W1124 01:15:10.884568 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0a130010_6476_46e9_be91_f9040275be7b.slice/crio-c70a14da8783b1d463e5f29484160c9f53c01842dd5053e324c2a1841345a7c2 WatchSource:0}: Error finding container 
c70a14da8783b1d463e5f29484160c9f53c01842dd5053e324c2a1841345a7c2: Status 404 returned error can't find the container with id c70a14da8783b1d463e5f29484160c9f53c01842dd5053e324c2a1841345a7c2 Nov 24 01:15:10 crc kubenswrapper[4755]: W1124 01:15:10.886472 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb2977dd9_b5c2_40cd_bcd7_12f91d2edf31.slice/crio-d7353bd5dd833ed4bbb11ad9ad5c98b9944432fb5a686d85473b92e7f460dc20 WatchSource:0}: Error finding container d7353bd5dd833ed4bbb11ad9ad5c98b9944432fb5a686d85473b92e7f460dc20: Status 404 returned error can't find the container with id d7353bd5dd833ed4bbb11ad9ad5c98b9944432fb5a686d85473b92e7f460dc20 Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.931877 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-trrhl"] Nov 24 01:15:10 crc kubenswrapper[4755]: I1124 01:15:10.962237 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:15:10 crc kubenswrapper[4755]: E1124 01:15:10.975704 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:15:11.475681964 +0000 UTC m=+136.161747465 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:11 crc kubenswrapper[4755]: I1124 01:15:11.055050 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-hqh6f"] Nov 24 01:15:11 crc kubenswrapper[4755]: I1124 01:15:11.067635 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:11 crc kubenswrapper[4755]: E1124 01:15:11.068050 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 01:15:11.568033274 +0000 UTC m=+136.254098775 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vxz4v" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:11 crc kubenswrapper[4755]: I1124 01:15:11.098746 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-gdtf7"] Nov 24 01:15:11 crc kubenswrapper[4755]: I1124 01:15:11.138360 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-xtvfv"] Nov 24 01:15:11 crc kubenswrapper[4755]: I1124 01:15:11.168734 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:15:11 crc kubenswrapper[4755]: E1124 01:15:11.169122 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:15:11.669101451 +0000 UTC m=+136.355166952 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:11 crc kubenswrapper[4755]: I1124 01:15:11.271916 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:11 crc kubenswrapper[4755]: E1124 01:15:11.272655 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 01:15:11.772640173 +0000 UTC m=+136.458705674 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vxz4v" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:11 crc kubenswrapper[4755]: I1124 01:15:11.304394 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-qtkb8"] Nov 24 01:15:11 crc kubenswrapper[4755]: I1124 01:15:11.305992 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4hrgd"] Nov 24 01:15:11 crc kubenswrapper[4755]: I1124 01:15:11.374645 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:15:11 crc kubenswrapper[4755]: E1124 01:15:11.374848 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:15:11.874810973 +0000 UTC m=+136.560876464 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:11 crc kubenswrapper[4755]: I1124 01:15:11.374917 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:11 crc kubenswrapper[4755]: E1124 01:15:11.375207 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 01:15:11.875200245 +0000 UTC m=+136.561265746 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vxz4v" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:11 crc kubenswrapper[4755]: I1124 01:15:11.502231 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:15:11 crc kubenswrapper[4755]: E1124 01:15:11.502373 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:15:12.002350428 +0000 UTC m=+136.688415939 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:11 crc kubenswrapper[4755]: I1124 01:15:11.509729 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:11 crc kubenswrapper[4755]: E1124 01:15:11.510414 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 01:15:12.010393474 +0000 UTC m=+136.696458975 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vxz4v" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:11 crc kubenswrapper[4755]: I1124 01:15:11.517452 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-g92v4"] Nov 24 01:15:11 crc kubenswrapper[4755]: I1124 01:15:11.540331 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-h4hv2"] Nov 24 01:15:11 crc kubenswrapper[4755]: I1124 01:15:11.606866 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-57p55"] Nov 24 01:15:11 crc kubenswrapper[4755]: I1124 01:15:11.612666 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:15:11 crc kubenswrapper[4755]: E1124 01:15:11.612974 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:15:12.112960516 +0000 UTC m=+136.799026017 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:11 crc kubenswrapper[4755]: I1124 01:15:11.612998 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-rn5m4"] Nov 24 01:15:11 crc kubenswrapper[4755]: I1124 01:15:11.618199 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-x78dk"] Nov 24 01:15:11 crc kubenswrapper[4755]: I1124 01:15:11.623693 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw2tr"] Nov 24 01:15:11 crc kubenswrapper[4755]: I1124 01:15:11.713996 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:11 crc kubenswrapper[4755]: E1124 01:15:11.714368 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-11-24 01:15:12.214354042 +0000 UTC m=+136.900419583 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vxz4v" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:11 crc kubenswrapper[4755]: I1124 01:15:11.762316 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7tgn4"] Nov 24 01:15:11 crc kubenswrapper[4755]: I1124 01:15:11.774923 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29399115-rt4z5"] Nov 24 01:15:11 crc kubenswrapper[4755]: I1124 01:15:11.783074 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-mzvv7"] Nov 24 01:15:11 crc kubenswrapper[4755]: I1124 01:15:11.814596 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:15:11 crc kubenswrapper[4755]: E1124 01:15:11.814750 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:15:12.314724888 +0000 UTC m=+137.000790389 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:11 crc kubenswrapper[4755]: I1124 01:15:11.815070 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:11 crc kubenswrapper[4755]: E1124 01:15:11.815504 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 01:15:12.315488361 +0000 UTC m=+137.001553862 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vxz4v" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:11 crc kubenswrapper[4755]: I1124 01:15:11.857335 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-nfhp7" event={"ID":"b2977dd9-b5c2-40cd-bcd7-12f91d2edf31","Type":"ContainerStarted","Data":"368be974b1e9b2640dc46661bf21bc9b8c6409b1ed83c5c98485294677305e19"} Nov 24 01:15:11 crc kubenswrapper[4755]: I1124 01:15:11.857385 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-nfhp7" event={"ID":"b2977dd9-b5c2-40cd-bcd7-12f91d2edf31","Type":"ContainerStarted","Data":"d7353bd5dd833ed4bbb11ad9ad5c98b9944432fb5a686d85473b92e7f460dc20"} Nov 24 01:15:11 crc kubenswrapper[4755]: I1124 01:15:11.880791 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-gdtf7" event={"ID":"ee68fc51-c244-479c-9c83-ed3b8137a2e6","Type":"ContainerStarted","Data":"84765681a8ab2be334e732fc679a5bc59f32bf9742f389370317a77cfdcd5250"} Nov 24 01:15:11 crc kubenswrapper[4755]: I1124 01:15:11.916701 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hqh6f" event={"ID":"ad829316-549b-4b0d-8e17-ded37a000f66","Type":"ContainerStarted","Data":"c4b387a50a7da6648ebe367496f358ffb7f00e2871d78aa4a8b53b125a51a2b9"} Nov 24 01:15:11 crc kubenswrapper[4755]: I1124 01:15:11.916981 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:15:11 crc kubenswrapper[4755]: E1124 01:15:11.917418 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:15:12.417399193 +0000 UTC m=+137.103464694 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:11 crc kubenswrapper[4755]: I1124 01:15:11.924636 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-rxc25"] Nov 24 01:15:11 crc kubenswrapper[4755]: I1124 01:15:11.935532 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7tgn4" event={"ID":"e633b0f5-f0c5-44ec-adfb-2d724924031a","Type":"ContainerStarted","Data":"89603e7c686da92ac352edf38e12fd0ae3a1671cb027ca7e03cdcef6ab39b60b"} Nov 24 01:15:11 crc kubenswrapper[4755]: I1124 01:15:11.966017 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-tvwrx" event={"ID":"0a130010-6476-46e9-be91-f9040275be7b","Type":"ContainerStarted","Data":"a0a5bbf69f06e13e4f28c7c529b3d2fe12ce9ebdc81fd14f8bbd0e4bcacdc50c"} Nov 24 01:15:11 crc kubenswrapper[4755]: I1124 01:15:11.966069 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-tvwrx" event={"ID":"0a130010-6476-46e9-be91-f9040275be7b","Type":"ContainerStarted","Data":"c70a14da8783b1d463e5f29484160c9f53c01842dd5053e324c2a1841345a7c2"} Nov 24 01:15:11 crc kubenswrapper[4755]: I1124 01:15:11.987904 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-h4hv2" event={"ID":"14de58c0-137f-407e-af53-0ee2bdf5468b","Type":"ContainerStarted","Data":"c1ba527728da73fba25b150bb39960928cb70e581668c8fa745cd7d0748a6ed3"} Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.018684 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:12 crc kubenswrapper[4755]: E1124 01:15:12.019732 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 01:15:12.519720428 +0000 UTC m=+137.205785929 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vxz4v" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.076443 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-2b47p" podStartSLOduration=117.07641588 podStartE2EDuration="1m57.07641588s" podCreationTimestamp="2025-11-24 01:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:15:12.027461844 +0000 UTC m=+136.713527335" watchObservedRunningTime="2025-11-24 01:15:12.07641588 +0000 UTC m=+136.762481381" Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.085839 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-nfhp7" podStartSLOduration=117.085816587 podStartE2EDuration="1m57.085816587s" podCreationTimestamp="2025-11-24 01:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:15:12.068780136 +0000 UTC m=+136.754845637" watchObservedRunningTime="2025-11-24 01:15:12.085816587 +0000 UTC m=+136.771882088" Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.109689 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-9f7kc" podStartSLOduration=117.109669245 podStartE2EDuration="1m57.109669245s" podCreationTimestamp="2025-11-24 01:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:15:12.10591585 +0000 UTC m=+136.791981361" watchObservedRunningTime="2025-11-24 01:15:12.109669245 +0000 UTC m=+136.795734746" Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.122805 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:15:12 crc kubenswrapper[4755]: E1124 01:15:12.124583 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:15:12.62455526 +0000 UTC m=+137.310620761 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.169254 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4gmv8" podStartSLOduration=117.169225784 podStartE2EDuration="1m57.169225784s" podCreationTimestamp="2025-11-24 01:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:15:12.166262263 +0000 UTC m=+136.852327764" watchObservedRunningTime="2025-11-24 01:15:12.169225784 +0000 UTC m=+136.855291285" Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.189089 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-tvwrx" podStartSLOduration=117.18907083 podStartE2EDuration="1m57.18907083s" podCreationTimestamp="2025-11-24 01:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:15:12.18775986 +0000 UTC m=+136.873825371" watchObservedRunningTime="2025-11-24 01:15:12.18907083 +0000 UTC m=+136.875136331" Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.189315 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zjcfn" event={"ID":"77b33d59-46dc-45ae-9ded-cbd3918f79fd","Type":"ContainerStarted","Data":"e8c11c392ad8d3c3f2ec2879ea647b5881c49f4dca3af287ded227c887b9b533"} Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.189357 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zjcfn" event={"ID":"77b33d59-46dc-45ae-9ded-cbd3918f79fd","Type":"ContainerStarted","Data":"278fcc8dc2d10030af9a30bff9dcc640ce251fc6d469848afd0912495393894f"} Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.189371 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-snkpv" event={"ID":"47cac8d4-7293-45b3-9142-7717f79f973b","Type":"ContainerStarted","Data":"7eedb52ef3400f1619ea33f0a7ebaf92d79603b9fd92473e7333a9688f40a7eb"} Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.189380 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-xtvfv" event={"ID":"e9851e18-35a0-465e-9bdf-e180e4576679","Type":"ContainerStarted","Data":"5f48542bc0beb967f109bbc077f2877a306b1618b8903e96c44485c159aafb59"} Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.189391 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-x78dk" event={"ID":"7c537b75-83c8-4250-aae0-cacbdb94445f","Type":"ContainerStarted","Data":"1b304e9a19313c3668d09b2aeae6e6f78fb2f043ff3ae5ba8b979e474877ffba"} Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.189400 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s445k" event={"ID":"f9511d1a-d4c3-48e5-add0-a09b3543f768","Type":"ContainerStarted","Data":"fb0dee47186a4dee3e7796593dbf6ea4d36f2e2332d71b0828f52adcafeeb513"} Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.189409 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b7wq9" event={"ID":"67b21633-2061-4642-b7b0-e784b17bebfe","Type":"ContainerStarted","Data":"557af451eb98d3955425962b707661ba3240dd3ac3310f2f485491a32496d87e"} Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.193255 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-p4sx7" event={"ID":"d2019d3e-6460-4c69-b9cb-afdad2426d6a","Type":"ContainerStarted","Data":"366e8495a649ed21e09dca7d61c76a6f6aaa106d93bbdbdf7c427150a235dd2d"} Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.193285 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-p4sx7" event={"ID":"d2019d3e-6460-4c69-b9cb-afdad2426d6a","Type":"ContainerStarted","Data":"933b56f0723eacd375e615131d5099f6ffa5eb242343eeeff4cca989769661d2"} Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.194104 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-p4sx7" Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.199565 4755 patch_prober.go:28] interesting pod/console-operator-58897d9998-p4sx7 container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.15:8443/readyz\": dial tcp 10.217.0.15:8443: connect: connection refused" start-of-body= Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.199631 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-p4sx7" podUID="d2019d3e-6460-4c69-b9cb-afdad2426d6a" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.15:8443/readyz\": dial tcp 10.217.0.15:8443: connect: connection refused" Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.226035 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:12 crc kubenswrapper[4755]: E1124 01:15:12.226931 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 01:15:12.726918086 +0000 UTC m=+137.412983587 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vxz4v" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.236342 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6qvk7" event={"ID":"fa143983-92e8-480e-9bb3-928892077000","Type":"ContainerStarted","Data":"f2f34818042da04a73e46f483e4cdbc91383a28aede3af12d3070b79cc946aed"} Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.263694 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qtkb8" event={"ID":"6e0b27d4-4d58-466c-9beb-2bb75b9513d5","Type":"ContainerStarted","Data":"d59edd1c48c5091faebeebcb9bf6643c9d400e29171e96a111eb0df5a305c1ee"} Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.265007 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw2tr" event={"ID":"85241300-e898-4330-8da8-577ddc45c251","Type":"ContainerStarted","Data":"87b6782639dd237b77067a865706300536156d559750648f6cfac4893a48c9c1"} Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.269768 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6zk9r" podStartSLOduration=117.269751534 podStartE2EDuration="1m57.269751534s" podCreationTimestamp="2025-11-24 01:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:15:12.268753463 +0000 UTC m=+136.954818974" watchObservedRunningTime="2025-11-24 01:15:12.269751534 +0000 UTC m=+136.955817035" Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.314048 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-vnvq4" podStartSLOduration=117.314028936 podStartE2EDuration="1m57.314028936s" podCreationTimestamp="2025-11-24 01:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:15:12.3089142 +0000 UTC m=+136.994979711" watchObservedRunningTime="2025-11-24 01:15:12.314028936 +0000 UTC m=+137.000094437" Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.328899 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:15:12 crc kubenswrapper[4755]: E1124 01:15:12.329734 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:15:12.829719975 +0000 UTC m=+137.515785476 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.396694 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-2qz86" podStartSLOduration=117.39667565 podStartE2EDuration="1m57.39667565s" podCreationTimestamp="2025-11-24 01:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:15:12.395913917 +0000 UTC m=+137.081979418" watchObservedRunningTime="2025-11-24 01:15:12.39667565 +0000 UTC m=+137.082741151" Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.398174 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cnqln" podStartSLOduration=117.397969999 podStartE2EDuration="1m57.397969999s" podCreationTimestamp="2025-11-24 01:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:15:12.364176477 +0000 UTC m=+137.050241988" watchObservedRunningTime="2025-11-24 01:15:12.397969999 +0000 UTC m=+137.084035500" Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.406663 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4hrgd" event={"ID":"8f4eb729-3801-47bc-a56f-e20d72aa89f8","Type":"ContainerStarted","Data":"225272f8fb8f6327737c5b2370d5f632ad51ee964de11ca22fe1192e974505bc"} Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.432670 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:12 crc kubenswrapper[4755]: E1124 01:15:12.433124 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 01:15:12.933110643 +0000 UTC m=+137.619176154 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vxz4v" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.439518 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-57p55" event={"ID":"3ffaa46d-e7a6-401c-abfc-5b1d8600de1c","Type":"ContainerStarted","Data":"afe33317c15972a35648c75b551e92e2292fa4e7187481ffcf62bd4f391a7116"} Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.461162 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2v27w" event={"ID":"d8dc4b77-8b76-4646-b593-dcc3c1b66403","Type":"ContainerStarted","Data":"5ef1eccb79995fa370961bbdf59833cc2a3a67108b4299e9a31e4a9d807ca22d"} Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.461205 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2v27w" event={"ID":"d8dc4b77-8b76-4646-b593-dcc3c1b66403","Type":"ContainerStarted","Data":"431c7a05905bfc601d81f1e8dc88e3609a11a31f4c10b639373663c6d0d4834b"} Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.462467 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2v27w" Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.463261 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" podStartSLOduration=117.463248723 podStartE2EDuration="1m57.463248723s" podCreationTimestamp="2025-11-24 01:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:15:12.427901574 +0000 UTC m=+137.113967085" watchObservedRunningTime="2025-11-24 01:15:12.463248723 +0000 UTC m=+137.149314224" Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.495338 4755 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-2v27w container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.38:8443/healthz\": dial tcp 10.217.0.38:8443: connect: connection refused" start-of-body= Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.495371 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2v27w" podUID="d8dc4b77-8b76-4646-b593-dcc3c1b66403" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.38:8443/healthz\": dial tcp 10.217.0.38:8443: connect: connection refused" Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.495533 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-mlls8" podStartSLOduration=117.495523599 podStartE2EDuration="1m57.495523599s" podCreationTimestamp="2025-11-24 01:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:15:12.463678396 +0000 UTC m=+137.149743907" 
watchObservedRunningTime="2025-11-24 01:15:12.495523599 +0000 UTC m=+137.181589100" Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.496902 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7bwdn" podStartSLOduration=117.496896221 podStartE2EDuration="1m57.496896221s" podCreationTimestamp="2025-11-24 01:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:15:12.495212409 +0000 UTC m=+137.181277910" watchObservedRunningTime="2025-11-24 01:15:12.496896221 +0000 UTC m=+137.182961722" Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.521284 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-xr447" event={"ID":"d274950d-dc6c-424c-b87e-a7b6e88b6092","Type":"ContainerStarted","Data":"8680aa0e24989c970b684badecdd9c6848ee6b1fa19a9b0d0476f61b6da269d6"} Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.521318 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-xr447" event={"ID":"d274950d-dc6c-424c-b87e-a7b6e88b6092","Type":"ContainerStarted","Data":"e42f767e6e6b4c3cab9a16522791ac877566d6b9ba842b914299741955b9ba9e"} Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.522665 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-l6tpb" event={"ID":"c1c7d9e5-2214-41f9-ba56-7843592e8865","Type":"ContainerStarted","Data":"4fefd2c7fe7c9a76d1b22a957ece33e024a354877d00fbea5742b9bda5238c37"} Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.529514 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-g92v4" event={"ID":"f1b73c0e-66fa-44dc-8349-59fd960d422a","Type":"ContainerStarted","Data":"bbd00f744a4353e2f9e9061e17544d6bfba78230badaf0942e11b2ad21442038"} Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.537051 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:15:12 crc kubenswrapper[4755]: E1124 01:15:12.537116 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:15:13.037102199 +0000 UTC m=+137.723167690 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.537267 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:12 crc kubenswrapper[4755]: E1124 01:15:12.538199 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 01:15:13.038186782 +0000 UTC m=+137.724252283 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vxz4v" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.538387 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-nfhp7" Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.538579 4755 patch_prober.go:28] interesting pod/router-default-5444994796-nfhp7 container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body= Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.538621 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nfhp7" podUID="b2977dd9-b5c2-40cd-bcd7-12f91d2edf31" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.550322 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-xtpxd" podStartSLOduration=117.550304762 podStartE2EDuration="1m57.550304762s" podCreationTimestamp="2025-11-24 01:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:15:12.549512947 +0000 UTC m=+137.235578448" watchObservedRunningTime="2025-11-24 01:15:12.550304762 +0000 UTC m=+137.236370263" Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.596399 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-trrhl" 
event={"ID":"1aa96598-89c0-44a3-be99-13cdef9b84dc","Type":"ContainerStarted","Data":"17600ecddfd68645af2a0e7e4238a3d532301be98077b6e81536cf97ff0153ac"} Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.612334 4755 generic.go:334] "Generic (PLEG): container finished" podID="3de6425b-7697-42d0-8b32-0a6e91078e9f" containerID="10166cb9d17bf975e8fc8b26f551ac57546c3c73751166b2eb043baa879fc94f" exitCode=0 Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.613035 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cslz2" event={"ID":"3de6425b-7697-42d0-8b32-0a6e91078e9f","Type":"ContainerDied","Data":"10166cb9d17bf975e8fc8b26f551ac57546c3c73751166b2eb043baa879fc94f"} Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.642986 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:15:12 crc kubenswrapper[4755]: E1124 01:15:12.643540 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:15:13.143525429 +0000 UTC m=+137.829590930 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.687956 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-rn5m4" event={"ID":"082646ba-8822-4ced-acfa-0e7ba97612a5","Type":"ContainerStarted","Data":"49b9b7b828732449df60593a7e97ceb0c16cf42df53f38d5c52d53004885cf11"} Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.689562 4755 patch_prober.go:28] interesting pod/downloads-7954f5f757-vnvq4 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" start-of-body= Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.689614 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-vnvq4" podUID="d55c6dc8-108f-4f8c-a6cf-4c7e1363c9e4" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.689754 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2v27w" podStartSLOduration=117.689717949 podStartE2EDuration="1m57.689717949s" podCreationTimestamp="2025-11-24 01:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:15:12.649907713 +0000 UTC m=+137.335973214" watchObservedRunningTime="2025-11-24 
01:15:12.689717949 +0000 UTC m=+137.375783470" Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.690160 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-l6tpb" podStartSLOduration=5.690156483 podStartE2EDuration="5.690156483s" podCreationTimestamp="2025-11-24 01:15:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:15:12.683064696 +0000 UTC m=+137.369130197" watchObservedRunningTime="2025-11-24 01:15:12.690156483 +0000 UTC m=+137.376221984" Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.706868 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4gmv8" Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.707109 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-2qz86" Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.726490 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-p4sx7" podStartSLOduration=117.726475662 podStartE2EDuration="1m57.726475662s" podCreationTimestamp="2025-11-24 01:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:15:12.725451421 +0000 UTC m=+137.411516932" watchObservedRunningTime="2025-11-24 01:15:12.726475662 +0000 UTC m=+137.412541163" Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.745026 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:12 crc kubenswrapper[4755]: E1124 01:15:12.758418 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 01:15:13.258398877 +0000 UTC m=+137.944464378 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vxz4v" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.764372 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zjcfn" podStartSLOduration=117.764356919 podStartE2EDuration="1m57.764356919s" podCreationTimestamp="2025-11-24 01:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:15:12.760820661 +0000 UTC m=+137.446886162" watchObservedRunningTime="2025-11-24 01:15:12.764356919 +0000 UTC m=+137.450422410" Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.846903 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:15:12 crc kubenswrapper[4755]: E1124 01:15:12.847241 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:15:13.347226529 +0000 UTC m=+138.033292030 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.871422 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6qvk7" podStartSLOduration=117.871404708 podStartE2EDuration="1m57.871404708s" podCreationTimestamp="2025-11-24 01:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:15:12.868349775 +0000 UTC m=+137.554415296" watchObservedRunningTime="2025-11-24 01:15:12.871404708 +0000 UTC m=+137.557470209" Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.920360 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s445k" podStartSLOduration=117.920339082 podStartE2EDuration="1m57.920339082s" podCreationTimestamp="2025-11-24 01:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:15:12.904528209 +0000 UTC m=+137.590593740" watchObservedRunningTime="2025-11-24 01:15:12.920339082 +0000 UTC m=+137.606404583" Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.924842 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-xr447" podStartSLOduration=117.924815278 podStartE2EDuration="1m57.924815278s" podCreationTimestamp="2025-11-24 01:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:15:12.92423393 +0000 UTC m=+137.610299441" watchObservedRunningTime="2025-11-24 01:15:12.924815278 +0000 UTC m=+137.610880789" Nov 24 01:15:12 crc kubenswrapper[4755]: I1124 01:15:12.948643 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:12 crc kubenswrapper[4755]: E1124 01:15:12.948966 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 01:15:13.448956245 +0000 UTC m=+138.135021746 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vxz4v" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.049350 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:15:13 crc kubenswrapper[4755]: E1124 01:15:13.049650 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:15:13.54963502 +0000 UTC m=+138.235700511 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.134888 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.150375 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:13 crc kubenswrapper[4755]: E1124 01:15:13.150673 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 01:15:13.650661675 +0000 UTC m=+138.336727176 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vxz4v" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.254096 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:15:13 crc kubenswrapper[4755]: E1124 01:15:13.254417 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:15:13.754403493 +0000 UTC m=+138.440468994 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.356462 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:13 crc kubenswrapper[4755]: E1124 01:15:13.357082 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 01:15:13.857070689 +0000 UTC m=+138.543136190 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vxz4v" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.483080 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:15:13 crc kubenswrapper[4755]: E1124 01:15:13.483243 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:15:13.983225791 +0000 UTC m=+138.669291292 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.483302 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:13 crc kubenswrapper[4755]: E1124 01:15:13.483539 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 01:15:13.983531081 +0000 UTC m=+138.669596582 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vxz4v" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.546801 4755 patch_prober.go:28] interesting pod/router-default-5444994796-nfhp7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 24 01:15:13 crc kubenswrapper[4755]: [-]has-synced failed: reason withheld Nov 24 01:15:13 crc kubenswrapper[4755]: [+]process-running ok Nov 24 01:15:13 crc kubenswrapper[4755]: healthz check failed Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.546857 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nfhp7" podUID="b2977dd9-b5c2-40cd-bcd7-12f91d2edf31" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.588145 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:15:13 crc kubenswrapper[4755]: E1124 01:15:13.589111 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:15:14.089096385 +0000 UTC m=+138.775161886 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.689412 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:13 crc kubenswrapper[4755]: E1124 01:15:13.689803 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 01:15:14.18979149 +0000 UTC m=+138.875856991 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vxz4v" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.719446 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-snkpv" event={"ID":"47cac8d4-7293-45b3-9142-7717f79f973b","Type":"ContainerStarted","Data":"a30dbe015f5606d90fd85ef6d2434562cf52d60873d68305ef284c187acc98fe"} Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.721208 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-mqx68" event={"ID":"b0af14ce-aaea-427f-bf89-15c1f4090ee7","Type":"ContainerStarted","Data":"993cf188a98c70f283f8f13dd9f5e4b23e40644c04f17dbc42c1cfcacfb23e44"} Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.722045 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-xtvfv" event={"ID":"e9851e18-35a0-465e-9bdf-e180e4576679","Type":"ContainerStarted","Data":"0ef6957a8e1eec2dcdbb19c4086dc2eb9a27d3d931de9033402c730e3a559842"} Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.722435 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-xtvfv" Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.723824 4755 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-xtvfv container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.18:8443/healthz\": dial tcp 10.217.0.18:8443: connect: connection refused" start-of-body= Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.723881 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-xtvfv" podUID="e9851e18-35a0-465e-9bdf-e180e4576679" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.18:8443/healthz\": dial tcp 10.217.0.18:8443: connect: connection refused" Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.734404 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-57p55" event={"ID":"3ffaa46d-e7a6-401c-abfc-5b1d8600de1c","Type":"ContainerStarted","Data":"ae104c53a217dd656da89ce153046775dab0acd0a46407f65a2e4112b91a2dc3"} Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.784456 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-snkpv" podStartSLOduration=118.78443719 podStartE2EDuration="1m58.78443719s" podCreationTimestamp="2025-11-24 01:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:15:13.744558522 +0000 UTC m=+138.430624023" watchObservedRunningTime="2025-11-24 01:15:13.78443719 +0000 UTC m=+138.470502691" Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.787194 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-x78dk" 
event={"ID":"7c537b75-83c8-4250-aae0-cacbdb94445f","Type":"ContainerStarted","Data":"46af39ffaa246e5d93b62a6a0cb30b261c05b20d89221eb1375672cbed64a6ab"} Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.787984 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-x78dk" Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.794971 4755 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-x78dk container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.26:8080/healthz\": dial tcp 10.217.0.26:8080: connect: connection refused" start-of-body= Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.795027 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-x78dk" podUID="7c537b75-83c8-4250-aae0-cacbdb94445f" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.26:8080/healthz\": dial tcp 10.217.0.26:8080: connect: connection refused" Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.795710 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:15:13 crc kubenswrapper[4755]: E1124 01:15:13.796948 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:15:14.296931642 +0000 UTC m=+138.982997143 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.815171 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-vmwlz" event={"ID":"06b2b494-6e14-4fd2-8fc8-5b491090adaa","Type":"ContainerStarted","Data":"c474e804ce68f64e52c3a9a21e16e7b25f2c92ccd908bee558972d7f3e516aca"} Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.815211 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-vmwlz" event={"ID":"06b2b494-6e14-4fd2-8fc8-5b491090adaa","Type":"ContainerStarted","Data":"4ad6a7b537a11251b60f65ef1494bcf47793ad056fa3f37fe5f1ef98536e827c"} Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.816479 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-x78dk" podStartSLOduration=118.816462388 podStartE2EDuration="1m58.816462388s" podCreationTimestamp="2025-11-24 01:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:15:13.81619279 +0000 UTC m=+138.502258301" watchObservedRunningTime="2025-11-24 01:15:13.816462388 +0000 UTC m=+138.502527889" Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.816803 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-xtvfv" podStartSLOduration=118.816797878 podStartE2EDuration="1m58.816797878s" podCreationTimestamp="2025-11-24 01:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:15:13.78739728 +0000 UTC m=+138.473462781" watchObservedRunningTime="2025-11-24 01:15:13.816797878 +0000 UTC m=+138.502863379" Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.831467 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-g92v4" event={"ID":"f1b73c0e-66fa-44dc-8349-59fd960d422a","Type":"ContainerStarted","Data":"e2d283388c5ea65bb38aba33fd7b95af3456184ca5bf9f61a57622483546d1ce"} Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.831516 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-g92v4" event={"ID":"f1b73c0e-66fa-44dc-8349-59fd960d422a","Type":"ContainerStarted","Data":"2c8bf93b42e42092ecfe29b91adce5b50626ce0f2d5e44c34e53cae56af0cb0f"} Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.832191 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-g92v4" Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.835387 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw2tr" event={"ID":"85241300-e898-4330-8da8-577ddc45c251","Type":"ContainerStarted","Data":"f878db1eb18ef72f9b5aa8c5bd5ded0c3e2b8f862cc38ace2dd07f0cf24cf44c"} Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.836172 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw2tr" Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.838201 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-rn5m4" event={"ID":"082646ba-8822-4ced-acfa-0e7ba97612a5","Type":"ContainerStarted","Data":"751df54445ed8b3d2f2211db4743257bf450947b3985e8fac6412959f588fbf3"} Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.842513 4755 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-cw2tr container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.35:5443/healthz\": dial tcp 10.217.0.35:5443: connect: connection refused" start-of-body= Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.842742 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw2tr" podUID="85241300-e898-4330-8da8-577ddc45c251" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.35:5443/healthz\": dial tcp 10.217.0.35:5443: connect: connection refused" Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.843114 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-mzvv7" event={"ID":"d8f7852c-96d0-405c-aa5d-f301690cc0c3","Type":"ContainerStarted","Data":"1b2623c19df684cf2cfb8b7e620f7607313d320a00f49527fe41e85d5630ce2e"} Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.843214 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-mzvv7" event={"ID":"d8f7852c-96d0-405c-aa5d-f301690cc0c3","Type":"ContainerStarted","Data":"bc0c7a9572bf5825dfd361a56bd36952f1c0b36539a86ced836679ee53d441b1"} Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.845796 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-vmwlz" podStartSLOduration=118.845784214 podStartE2EDuration="1m58.845784214s" podCreationTimestamp="2025-11-24 01:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:15:13.844126263 +0000 UTC m=+138.530191764" watchObservedRunningTime="2025-11-24 01:15:13.845784214 +0000 UTC m=+138.531849715" Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.850145 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-gdtf7" event={"ID":"ee68fc51-c244-479c-9c83-ed3b8137a2e6","Type":"ContainerStarted","Data":"6fcdc5bcc55b3fc52b4235c97c270e1a829bcb41d292527f9881575927d2d364"} Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.877989 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qtkb8" event={"ID":"6e0b27d4-4d58-466c-9beb-2bb75b9513d5","Type":"ContainerStarted","Data":"f771f520ef3b478de0bafc2bfa559efd141aea008bd668ae92e0e9bcd05f1b67"} Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.878033 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qtkb8" event={"ID":"6e0b27d4-4d58-466c-9beb-2bb75b9513d5","Type":"ContainerStarted","Data":"e0d699b3d97e56df9de10bcc258c755850d31bf50b6554a7577b4d569957c816"} Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.897797 4755 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:13 crc kubenswrapper[4755]: E1124 01:15:13.898234 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 01:15:14.398217395 +0000 UTC m=+139.084282896 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vxz4v" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.898565 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cslz2" event={"ID":"3de6425b-7697-42d0-8b32-0a6e91078e9f","Type":"ContainerStarted","Data":"c18422c7c0e87bc80e3131391522fab9414765da6d0004854639167104c101cf"} Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.911742 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-mzvv7" podStartSLOduration=6.911726868 podStartE2EDuration="6.911726868s" podCreationTimestamp="2025-11-24 01:15:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:15:13.906958512 +0000 UTC m=+138.593024013" watchObservedRunningTime="2025-11-24 01:15:13.911726868 +0000 UTC m=+138.597792369" Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.912938 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw2tr" podStartSLOduration=118.912932294 podStartE2EDuration="1m58.912932294s" podCreationTimestamp="2025-11-24 01:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:15:13.873958084 +0000 UTC m=+138.560023595" watchObservedRunningTime="2025-11-24 01:15:13.912932294 +0000 UTC m=+138.598997795" Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.950979 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-g92v4" podStartSLOduration=6.950957095 podStartE2EDuration="6.950957095s" podCreationTimestamp="2025-11-24 01:15:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:15:13.948374297 +0000 UTC m=+138.634439798" watchObservedRunningTime="2025-11-24 01:15:13.950957095 +0000 UTC m=+138.637022806" Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.973192 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b7wq9" 
event={"ID":"67b21633-2061-4642-b7b0-e784b17bebfe","Type":"ContainerStarted","Data":"96e788cc65c4cb60840168a04b62f5b4f001a44d7db05fd378a4352ae1227b17"} Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.973248 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b7wq9" event={"ID":"67b21633-2061-4642-b7b0-e784b17bebfe","Type":"ContainerStarted","Data":"6733552cea9bb1e0fd0420a4ad1e09f4d4b45f9309a22ca461c60e57ae7451ce"} Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.978538 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-rn5m4" podStartSLOduration=118.978522847 podStartE2EDuration="1m58.978522847s" podCreationTimestamp="2025-11-24 01:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:15:13.971215874 +0000 UTC m=+138.657281375" watchObservedRunningTime="2025-11-24 01:15:13.978522847 +0000 UTC m=+138.664588348" Nov 24 01:15:13 crc kubenswrapper[4755]: I1124 01:15:13.999158 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:15:14 crc kubenswrapper[4755]: E1124 01:15:14.000498 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:15:14.500481908 +0000 UTC m=+139.186547419 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:14 crc kubenswrapper[4755]: I1124 01:15:14.003119 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cslz2" podStartSLOduration=119.003105338 podStartE2EDuration="1m59.003105338s" podCreationTimestamp="2025-11-24 01:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:15:14.002938023 +0000 UTC m=+138.689003534" watchObservedRunningTime="2025-11-24 01:15:14.003105338 +0000 UTC m=+138.689170839" Nov 24 01:15:14 crc kubenswrapper[4755]: I1124 01:15:14.017671 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rxc25" event={"ID":"2c25c73b-9396-44da-8e6d-67af776335e5","Type":"ContainerStarted","Data":"c6b95d757cf85c9234295de58464651c6f2c2f4253c42f31aa30dd79e8c8a41d"} Nov 24 01:15:14 crc kubenswrapper[4755]: I1124 01:15:14.017710 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rxc25" event={"ID":"2c25c73b-9396-44da-8e6d-67af776335e5","Type":"ContainerStarted","Data":"58c87370b75320aec86c50d3c3a1270ff903fdabd7adf9a2835b324a13ad3dbb"} Nov 24 01:15:14 crc kubenswrapper[4755]: I1124 01:15:14.017721 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rxc25" event={"ID":"2c25c73b-9396-44da-8e6d-67af776335e5","Type":"ContainerStarted","Data":"66feef6af2112dd7fd81f65d624852b87a62407620e9503ea6568aeb90494b4c"} Nov 24 01:15:14 crc kubenswrapper[4755]: I1124 01:15:14.018665 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29399115-rt4z5" event={"ID":"e6623318-d2a9-4015-b310-96a7506f61f9","Type":"ContainerStarted","Data":"1b41606d191efd72dfabdf8a9615d0ad8b4816b30272819a227c28f1e9382fa3"} Nov 24 01:15:14 crc kubenswrapper[4755]: I1124 01:15:14.018689 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29399115-rt4z5" event={"ID":"e6623318-d2a9-4015-b310-96a7506f61f9","Type":"ContainerStarted","Data":"efc99f8231508c8fb7a853d1b90f8a1d4696d7f826c058d4f0d9b4800a23b81e"} Nov 24 01:15:14 crc kubenswrapper[4755]: I1124 01:15:14.020461 4755 generic.go:334] "Generic (PLEG): container finished" podID="ad829316-549b-4b0d-8e17-ded37a000f66" containerID="bb35d252260e3c52bb5e930a84a2d42db62a48071474dfa56ac8a1c9dd9d57e0" exitCode=0 Nov 24 01:15:14 crc kubenswrapper[4755]: I1124 01:15:14.020575 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hqh6f" event={"ID":"ad829316-549b-4b0d-8e17-ded37a000f66","Type":"ContainerDied","Data":"bb35d252260e3c52bb5e930a84a2d42db62a48071474dfa56ac8a1c9dd9d57e0"} Nov 24 01:15:14 crc kubenswrapper[4755]: I1124 01:15:14.048679 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7tgn4" event={"ID":"e633b0f5-f0c5-44ec-adfb-2d724924031a","Type":"ContainerStarted","Data":"2d55f7ebeb177ecf65212c953073c8eaa6ac2a3e9f1bfec2bd0391ad24dacf63"} Nov 24 01:15:14 crc kubenswrapper[4755]: I1124 01:15:14.067965 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-gdtf7" podStartSLOduration=119.067951448 podStartE2EDuration="1m59.067951448s" podCreationTimestamp="2025-11-24 01:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:15:14.06700561 +0000 UTC m=+138.753071111" watchObservedRunningTime="2025-11-24 01:15:14.067951448 +0000 UTC m=+138.754016949" Nov 24 01:15:14 crc kubenswrapper[4755]: I1124 01:15:14.068333 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-l6tpb" event={"ID":"c1c7d9e5-2214-41f9-ba56-7843592e8865","Type":"ContainerStarted","Data":"de9b90d4886c650240819edba5ce3bc69967df01fb9ad077e8bce7057840f657"} Nov 24 01:15:14 crc kubenswrapper[4755]: I1124 01:15:14.068480 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qtkb8" podStartSLOduration=119.068476054 podStartE2EDuration="1m59.068476054s" podCreationTimestamp="2025-11-24 01:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:15:14.040898322 +0000 UTC m=+138.726963823" watchObservedRunningTime="2025-11-24 01:15:14.068476054 +0000 UTC m=+138.754541555" Nov 24 01:15:14 crc kubenswrapper[4755]: I1124 01:15:14.083595 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-trrhl" event={"ID":"1aa96598-89c0-44a3-be99-13cdef9b84dc","Type":"ContainerStarted","Data":"6b231e289c12fd87cc399c9d4651cd1bdaf6d3cb2ee3871b5bca10bd29c9c887"} Nov 24 01:15:14 crc kubenswrapper[4755]: I1124 01:15:14.083679 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-trrhl" event={"ID":"1aa96598-89c0-44a3-be99-13cdef9b84dc","Type":"ContainerStarted","Data":"c35b04f7508f8b75931a3508738e5b521f3d6e56e236bfd165bb242d3627b44d"} Nov 24 01:15:14 crc kubenswrapper[4755]: I1124 01:15:14.101296 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:14 crc kubenswrapper[4755]: E1124 01:15:14.101575 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 01:15:14.601564315 +0000 UTC m=+139.287629816 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vxz4v" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:14 crc kubenswrapper[4755]: I1124 01:15:14.106973 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4hrgd" event={"ID":"8f4eb729-3801-47bc-a56f-e20d72aa89f8","Type":"ContainerStarted","Data":"b05f2fcc1c0fd9d3a26596da71ca5f5113ab23632e2d02f906ef5b71ec0c3913"} Nov 24 01:15:14 crc kubenswrapper[4755]: I1124 01:15:14.107023 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4hrgd" event={"ID":"8f4eb729-3801-47bc-a56f-e20d72aa89f8","Type":"ContainerStarted","Data":"88f86b826a60d1217f3f649bc2109d14b8aac55c0ff3c20aa2ac98502edd9959"} Nov 24 01:15:14 crc kubenswrapper[4755]: I1124 01:15:14.107669 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4hrgd" Nov 24 01:15:14 crc kubenswrapper[4755]: I1124 01:15:14.161696 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-h4hv2" event={"ID":"14de58c0-137f-407e-af53-0ee2bdf5468b","Type":"ContainerStarted","Data":"02ef40551ea4056dbd66b4e1e4c246d231babdcc9b5364ba10d85425ce80f492"} Nov 24 01:15:14 crc kubenswrapper[4755]: I1124 01:15:14.162808 4755 patch_prober.go:28] interesting pod/console-operator-58897d9998-p4sx7 container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.15:8443/readyz\": dial tcp 10.217.0.15:8443: connect: connection refused" start-of-body= Nov 24 01:15:14 crc kubenswrapper[4755]: I1124 01:15:14.162857 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-p4sx7" podUID="d2019d3e-6460-4c69-b9cb-afdad2426d6a" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.15:8443/readyz\": dial tcp 10.217.0.15:8443: connect: connection refused" Nov 24 01:15:14 crc kubenswrapper[4755]: I1124 01:15:14.185090 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7tgn4" podStartSLOduration=119.185073505 podStartE2EDuration="1m59.185073505s" podCreationTimestamp="2025-11-24 01:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:15:14.182824647 +0000 UTC m=+138.868890148" watchObservedRunningTime="2025-11-24 01:15:14.185073505 +0000 UTC m=+138.871139006" Nov 24 01:15:14 crc kubenswrapper[4755]: I1124 01:15:14.185986 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rxc25" podStartSLOduration=119.185979173 podStartE2EDuration="1m59.185979173s" podCreationTimestamp="2025-11-24 01:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 
+0000 UTC" observedRunningTime="2025-11-24 01:15:14.119465562 +0000 UTC m=+138.805531063" watchObservedRunningTime="2025-11-24 01:15:14.185979173 +0000 UTC m=+138.872044684" Nov 24 01:15:14 crc kubenswrapper[4755]: I1124 01:15:14.202114 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:15:14 crc kubenswrapper[4755]: E1124 01:15:14.203889 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:15:14.703868829 +0000 UTC m=+139.389934330 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:14 crc kubenswrapper[4755]: I1124 01:15:14.237570 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2v27w" Nov 24 01:15:14 crc kubenswrapper[4755]: I1124 01:15:14.251835 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b7wq9" podStartSLOduration=119.251810993 podStartE2EDuration="1m59.251810993s" podCreationTimestamp="2025-11-24 01:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:15:14.250822693 +0000 UTC m=+138.936888194" watchObservedRunningTime="2025-11-24 01:15:14.251810993 +0000 UTC m=+138.937876494" Nov 24 01:15:14 crc kubenswrapper[4755]: I1124 01:15:14.287813 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-vmwlz" Nov 24 01:15:14 crc kubenswrapper[4755]: I1124 01:15:14.287856 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-vmwlz" Nov 24 01:15:14 crc kubenswrapper[4755]: I1124 01:15:14.291745 4755 patch_prober.go:28] interesting pod/apiserver-76f77b778f-vmwlz container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="Get \"https://10.217.0.9:8443/livez\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Nov 24 01:15:14 crc kubenswrapper[4755]: I1124 01:15:14.291792 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-vmwlz" podUID="06b2b494-6e14-4fd2-8fc8-5b491090adaa" containerName="openshift-apiserver" probeResult="failure" output="Get \"https://10.217.0.9:8443/livez\": dial tcp 10.217.0.9:8443: connect: connection refused" Nov 24 01:15:14 crc kubenswrapper[4755]: I1124 01:15:14.305769 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:14 crc kubenswrapper[4755]: E1124 01:15:14.307837 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 01:15:14.807821954 +0000 UTC m=+139.493887455 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vxz4v" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:14 crc kubenswrapper[4755]: I1124 01:15:14.340551 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29399115-rt4z5" podStartSLOduration=14.340535263 podStartE2EDuration="14.340535263s" podCreationTimestamp="2025-11-24 01:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:15:14.340354588 +0000 UTC m=+139.026420099" watchObservedRunningTime="2025-11-24 01:15:14.340535263 +0000 UTC m=+139.026600764" Nov 24 01:15:14 crc kubenswrapper[4755]: I1124 01:15:14.365356 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4hrgd" podStartSLOduration=119.365340911 podStartE2EDuration="1m59.365340911s" podCreationTimestamp="2025-11-24 01:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:15:14.362623248 +0000 UTC m=+139.048688759" watchObservedRunningTime="2025-11-24 01:15:14.365340911 +0000 UTC m=+139.051406412" Nov 24 01:15:14 crc kubenswrapper[4755]: I1124 01:15:14.408477 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:15:14 crc kubenswrapper[4755]: E1124 01:15:14.408916 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:15:14.908895791 +0000 UTC m=+139.594961302 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:14 crc kubenswrapper[4755]: I1124 01:15:14.455859 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-h4hv2" podStartSLOduration=119.455840064 podStartE2EDuration="1m59.455840064s" podCreationTimestamp="2025-11-24 01:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:15:14.45406004 +0000 UTC m=+139.140125541" watchObservedRunningTime="2025-11-24 01:15:14.455840064 +0000 UTC m=+139.141905565" Nov 24 01:15:14 crc kubenswrapper[4755]: I1124 01:15:14.506838 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-trrhl" podStartSLOduration=119.506820601 podStartE2EDuration="1m59.506820601s" podCreationTimestamp="2025-11-24 01:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:15:14.505704107 +0000 UTC m=+139.191769618" watchObservedRunningTime="2025-11-24 01:15:14.506820601 +0000 UTC m=+139.192886102" Nov 24 01:15:14 crc kubenswrapper[4755]: I1124 01:15:14.510564 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:14 crc kubenswrapper[4755]: E1124 01:15:14.510941 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 01:15:15.010926367 +0000 UTC m=+139.696991868 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vxz4v" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:14 crc kubenswrapper[4755]: I1124 01:15:14.545100 4755 patch_prober.go:28] interesting pod/router-default-5444994796-nfhp7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 24 01:15:14 crc kubenswrapper[4755]: [-]has-synced failed: reason withheld Nov 24 01:15:14 crc kubenswrapper[4755]: [+]process-running ok Nov 24 01:15:14 crc kubenswrapper[4755]: healthz check failed Nov 24 01:15:14 crc kubenswrapper[4755]: I1124 01:15:14.545162 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nfhp7" podUID="b2977dd9-b5c2-40cd-bcd7-12f91d2edf31" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 24 01:15:14 crc kubenswrapper[4755]: I1124 01:15:14.611459 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:15:14 crc kubenswrapper[4755]: E1124 01:15:14.611806 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:15:15.111791817 +0000 UTC m=+139.797857318 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:14 crc kubenswrapper[4755]: I1124 01:15:14.713418 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:14 crc kubenswrapper[4755]: E1124 01:15:14.713814 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 01:15:15.213802002 +0000 UTC m=+139.899867503 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vxz4v" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:14 crc kubenswrapper[4755]: I1124 01:15:14.783186 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cslz2" Nov 24 01:15:14 crc kubenswrapper[4755]: I1124 01:15:14.783245 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cslz2" Nov 24 01:15:14 crc kubenswrapper[4755]: I1124 01:15:14.785495 4755 patch_prober.go:28] interesting pod/apiserver-7bbb656c7d-cslz2 container/oauth-apiserver namespace/openshift-oauth-apiserver: Startup probe status=failure output="Get \"https://10.217.0.32:8443/livez\": dial tcp 10.217.0.32:8443: connect: connection refused" start-of-body= Nov 24 01:15:14 crc kubenswrapper[4755]: I1124 01:15:14.785572 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cslz2" podUID="3de6425b-7697-42d0-8b32-0a6e91078e9f" containerName="oauth-apiserver" probeResult="failure" output="Get \"https://10.217.0.32:8443/livez\": dial tcp 10.217.0.32:8443: connect: connection refused" Nov 24 01:15:14 crc kubenswrapper[4755]: I1124 01:15:14.814329 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:15:14 crc kubenswrapper[4755]: E1124 01:15:14.814722 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:15:15.314704604 +0000 UTC m=+140.000770105 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:14 crc kubenswrapper[4755]: I1124 01:15:14.915429 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:14 crc kubenswrapper[4755]: E1124 01:15:14.915699 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-11-24 01:15:15.415687568 +0000 UTC m=+140.101753069 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vxz4v" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.016782 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:15:15 crc kubenswrapper[4755]: E1124 01:15:15.016986 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:15:15.516959 +0000 UTC m=+140.203024501 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.017084 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:15 crc kubenswrapper[4755]: E1124 01:15:15.017390 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 01:15:15.517376393 +0000 UTC m=+140.203441904 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vxz4v" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.118488 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:15:15 crc kubenswrapper[4755]: E1124 01:15:15.118709 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:15:15.618679557 +0000 UTC m=+140.304745058 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.118842 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:15 crc kubenswrapper[4755]: E1124 01:15:15.119150 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 01:15:15.619139041 +0000 UTC m=+140.305204532 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vxz4v" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.167706 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hqh6f" event={"ID":"ad829316-549b-4b0d-8e17-ded37a000f66","Type":"ContainerStarted","Data":"76f3651a4ad990f334cf1ed2c5361a0deabc6f2bc5311cea2b672549deba4c37"} Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.168708 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hqh6f" Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.170799 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-57p55" event={"ID":"3ffaa46d-e7a6-401c-abfc-5b1d8600de1c","Type":"ContainerStarted","Data":"56010bce9fe862339894b6526e302bfc6850f9e7203a6b94efc9edbc9e932b2f"} Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.171581 4755 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-x78dk container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.26:8080/healthz\": dial tcp 10.217.0.26:8080: connect: connection refused" start-of-body= Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.171648 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-x78dk" podUID="7c537b75-83c8-4250-aae0-cacbdb94445f" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.26:8080/healthz\": dial tcp 10.217.0.26:8080: connect: connection refused" Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.190092 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-p4sx7" Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.190228 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-xtvfv" Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.195538 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hqh6f" podStartSLOduration=120.195516614 podStartE2EDuration="2m0.195516614s" podCreationTimestamp="2025-11-24 01:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:15:15.195194814 +0000 UTC m=+139.881260335" watchObservedRunningTime="2025-11-24 01:15:15.195516614 +0000 UTC m=+139.881582115" Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.219966 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:15:15 crc kubenswrapper[4755]: 
E1124 01:15:15.220120 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:15:15.720098414 +0000 UTC m=+140.406163905 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.220677 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:15 crc kubenswrapper[4755]: E1124 01:15:15.221075 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 01:15:15.721058994 +0000 UTC m=+140.407124495 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vxz4v" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.253218 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-57p55" podStartSLOduration=120.253198455 podStartE2EDuration="2m0.253198455s" podCreationTimestamp="2025-11-24 01:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:15:15.216097442 +0000 UTC m=+139.902162953" watchObservedRunningTime="2025-11-24 01:15:15.253198455 +0000 UTC m=+139.939263956" Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.321928 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:15:15 crc kubenswrapper[4755]: E1124 01:15:15.322132 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:15:15.822102269 +0000 UTC m=+140.508167780 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.322776 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:15 crc kubenswrapper[4755]: E1124 01:15:15.325938 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 01:15:15.825918916 +0000 UTC m=+140.511984507 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vxz4v" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.425830 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:15:15 crc kubenswrapper[4755]: E1124 01:15:15.426037 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:15:15.926008123 +0000 UTC m=+140.612073634 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.426249 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:15 crc kubenswrapper[4755]: E1124 01:15:15.426669 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 01:15:15.926658592 +0000 UTC m=+140.612724083 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vxz4v" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.492701 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-57wzn"] Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.493640 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-57wzn" Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.499103 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.508314 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-57wzn"] Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.527917 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.528094 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-djxdq\" (UniqueName: \"kubernetes.io/projected/9806f604-0abe-4de1-af45-78232744bc87-kube-api-access-djxdq\") pod \"community-operators-57wzn\" (UID: \"9806f604-0abe-4de1-af45-78232744bc87\") " pod="openshift-marketplace/community-operators-57wzn" Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.528166 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9806f604-0abe-4de1-af45-78232744bc87-catalog-content\") pod \"community-operators-57wzn\" (UID: \"9806f604-0abe-4de1-af45-78232744bc87\") " pod="openshift-marketplace/community-operators-57wzn" Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.528196 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9806f604-0abe-4de1-af45-78232744bc87-utilities\") pod \"community-operators-57wzn\" (UID: \"9806f604-0abe-4de1-af45-78232744bc87\") " pod="openshift-marketplace/community-operators-57wzn" Nov 24 01:15:15 crc kubenswrapper[4755]: E1124 01:15:15.528382 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:15:16.028349758 +0000 UTC m=+140.714415259 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.539195 4755 patch_prober.go:28] interesting pod/router-default-5444994796-nfhp7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 24 01:15:15 crc kubenswrapper[4755]: [-]has-synced failed: reason withheld Nov 24 01:15:15 crc kubenswrapper[4755]: [+]process-running ok Nov 24 01:15:15 crc kubenswrapper[4755]: healthz check failed Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.539508 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nfhp7" podUID="b2977dd9-b5c2-40cd-bcd7-12f91d2edf31" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.630334 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9806f604-0abe-4de1-af45-78232744bc87-catalog-content\") pod \"community-operators-57wzn\" (UID: \"9806f604-0abe-4de1-af45-78232744bc87\") " pod="openshift-marketplace/community-operators-57wzn" Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.630390 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9806f604-0abe-4de1-af45-78232744bc87-utilities\") pod \"community-operators-57wzn\" (UID: \"9806f604-0abe-4de1-af45-78232744bc87\") " pod="openshift-marketplace/community-operators-57wzn" Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.630457 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.630508 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-djxdq\" (UniqueName: \"kubernetes.io/projected/9806f604-0abe-4de1-af45-78232744bc87-kube-api-access-djxdq\") pod \"community-operators-57wzn\" (UID: \"9806f604-0abe-4de1-af45-78232744bc87\") " pod="openshift-marketplace/community-operators-57wzn" Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.630822 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9806f604-0abe-4de1-af45-78232744bc87-catalog-content\") pod \"community-operators-57wzn\" (UID: \"9806f604-0abe-4de1-af45-78232744bc87\") " pod="openshift-marketplace/community-operators-57wzn" Nov 24 01:15:15 crc kubenswrapper[4755]: E1124 01:15:15.631065 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" 
failed. No retries permitted until 2025-11-24 01:15:16.131050784 +0000 UTC m=+140.817116335 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vxz4v" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.631321 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9806f604-0abe-4de1-af45-78232744bc87-utilities\") pod \"community-operators-57wzn\" (UID: \"9806f604-0abe-4de1-af45-78232744bc87\") " pod="openshift-marketplace/community-operators-57wzn" Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.683982 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-djxdq\" (UniqueName: \"kubernetes.io/projected/9806f604-0abe-4de1-af45-78232744bc87-kube-api-access-djxdq\") pod \"community-operators-57wzn\" (UID: \"9806f604-0abe-4de1-af45-78232744bc87\") " pod="openshift-marketplace/community-operators-57wzn" Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.686387 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-khbxb"] Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.687687 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-khbxb" Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.692335 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.711065 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-khbxb"] Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.732230 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:15:15 crc kubenswrapper[4755]: E1124 01:15:15.732570 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:15:16.232555044 +0000 UTC m=+140.918620545 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.809369 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-57wzn" Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.833208 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b1028f34-b287-4775-a138-1ccdae47b7ee-utilities\") pod \"certified-operators-khbxb\" (UID: \"b1028f34-b287-4775-a138-1ccdae47b7ee\") " pod="openshift-marketplace/certified-operators-khbxb" Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.833447 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qfrt6\" (UniqueName: \"kubernetes.io/projected/b1028f34-b287-4775-a138-1ccdae47b7ee-kube-api-access-qfrt6\") pod \"certified-operators-khbxb\" (UID: \"b1028f34-b287-4775-a138-1ccdae47b7ee\") " pod="openshift-marketplace/certified-operators-khbxb" Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.833627 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.833746 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b1028f34-b287-4775-a138-1ccdae47b7ee-catalog-content\") pod \"certified-operators-khbxb\" (UID: \"b1028f34-b287-4775-a138-1ccdae47b7ee\") " pod="openshift-marketplace/certified-operators-khbxb" Nov 24 01:15:15 crc kubenswrapper[4755]: E1124 01:15:15.833988 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 01:15:16.333973711 +0000 UTC m=+141.020039212 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vxz4v" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.881970 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-899vs"] Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.883395 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-899vs" Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.892894 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-899vs"] Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.934861 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.935017 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b1028f34-b287-4775-a138-1ccdae47b7ee-utilities\") pod \"certified-operators-khbxb\" (UID: \"b1028f34-b287-4775-a138-1ccdae47b7ee\") " pod="openshift-marketplace/certified-operators-khbxb" Nov 24 01:15:15 crc kubenswrapper[4755]: E1124 01:15:15.935043 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:15:16.435014737 +0000 UTC m=+141.121080248 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.935085 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qfrt6\" (UniqueName: \"kubernetes.io/projected/b1028f34-b287-4775-a138-1ccdae47b7ee-kube-api-access-qfrt6\") pod \"certified-operators-khbxb\" (UID: \"b1028f34-b287-4775-a138-1ccdae47b7ee\") " pod="openshift-marketplace/certified-operators-khbxb" Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.935170 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.935223 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b1028f34-b287-4775-a138-1ccdae47b7ee-catalog-content\") pod \"certified-operators-khbxb\" (UID: \"b1028f34-b287-4775-a138-1ccdae47b7ee\") " pod="openshift-marketplace/certified-operators-khbxb" Nov 24 01:15:15 crc kubenswrapper[4755]: I1124 01:15:15.935401 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b1028f34-b287-4775-a138-1ccdae47b7ee-utilities\") pod \"certified-operators-khbxb\" (UID: \"b1028f34-b287-4775-a138-1ccdae47b7ee\") " pod="openshift-marketplace/certified-operators-khbxb" Nov 24 01:15:15 crc kubenswrapper[4755]: 
I1124 01:15:15.935710 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b1028f34-b287-4775-a138-1ccdae47b7ee-catalog-content\") pod \"certified-operators-khbxb\" (UID: \"b1028f34-b287-4775-a138-1ccdae47b7ee\") " pod="openshift-marketplace/certified-operators-khbxb" Nov 24 01:15:15 crc kubenswrapper[4755]: E1124 01:15:15.935878 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 01:15:16.435858223 +0000 UTC m=+141.121923804 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vxz4v" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:16 crc kubenswrapper[4755]: I1124 01:15:16.021543 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qfrt6\" (UniqueName: \"kubernetes.io/projected/b1028f34-b287-4775-a138-1ccdae47b7ee-kube-api-access-qfrt6\") pod \"certified-operators-khbxb\" (UID: \"b1028f34-b287-4775-a138-1ccdae47b7ee\") " pod="openshift-marketplace/certified-operators-khbxb" Nov 24 01:15:16 crc kubenswrapper[4755]: I1124 01:15:16.035928 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:15:16 crc kubenswrapper[4755]: I1124 01:15:16.036140 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a1a2052-a1d2-4b6c-bb90-1c51c13499da-utilities\") pod \"community-operators-899vs\" (UID: \"9a1a2052-a1d2-4b6c-bb90-1c51c13499da\") " pod="openshift-marketplace/community-operators-899vs" Nov 24 01:15:16 crc kubenswrapper[4755]: I1124 01:15:16.036190 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a1a2052-a1d2-4b6c-bb90-1c51c13499da-catalog-content\") pod \"community-operators-899vs\" (UID: \"9a1a2052-a1d2-4b6c-bb90-1c51c13499da\") " pod="openshift-marketplace/community-operators-899vs" Nov 24 01:15:16 crc kubenswrapper[4755]: I1124 01:15:16.036301 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n5vxk\" (UniqueName: \"kubernetes.io/projected/9a1a2052-a1d2-4b6c-bb90-1c51c13499da-kube-api-access-n5vxk\") pod \"community-operators-899vs\" (UID: \"9a1a2052-a1d2-4b6c-bb90-1c51c13499da\") " pod="openshift-marketplace/community-operators-899vs" Nov 24 01:15:16 crc kubenswrapper[4755]: E1124 01:15:16.036456 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-11-24 01:15:16.536424534 +0000 UTC m=+141.222490075 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:16 crc kubenswrapper[4755]: I1124 01:15:16.061906 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw2tr" Nov 24 01:15:16 crc kubenswrapper[4755]: I1124 01:15:16.124677 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-ll47t"] Nov 24 01:15:16 crc kubenswrapper[4755]: I1124 01:15:16.125562 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ll47t" Nov 24 01:15:16 crc kubenswrapper[4755]: I1124 01:15:16.140803 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lh4g2\" (UniqueName: \"kubernetes.io/projected/601c4d50-2427-46ff-adc3-69c6960d43c2-kube-api-access-lh4g2\") pod \"certified-operators-ll47t\" (UID: \"601c4d50-2427-46ff-adc3-69c6960d43c2\") " pod="openshift-marketplace/certified-operators-ll47t" Nov 24 01:15:16 crc kubenswrapper[4755]: I1124 01:15:16.140850 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:16 crc kubenswrapper[4755]: I1124 01:15:16.140897 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n5vxk\" (UniqueName: \"kubernetes.io/projected/9a1a2052-a1d2-4b6c-bb90-1c51c13499da-kube-api-access-n5vxk\") pod \"community-operators-899vs\" (UID: \"9a1a2052-a1d2-4b6c-bb90-1c51c13499da\") " pod="openshift-marketplace/community-operators-899vs" Nov 24 01:15:16 crc kubenswrapper[4755]: I1124 01:15:16.140933 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/601c4d50-2427-46ff-adc3-69c6960d43c2-catalog-content\") pod \"certified-operators-ll47t\" (UID: \"601c4d50-2427-46ff-adc3-69c6960d43c2\") " pod="openshift-marketplace/certified-operators-ll47t" Nov 24 01:15:16 crc kubenswrapper[4755]: I1124 01:15:16.140952 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/601c4d50-2427-46ff-adc3-69c6960d43c2-utilities\") pod \"certified-operators-ll47t\" (UID: \"601c4d50-2427-46ff-adc3-69c6960d43c2\") " pod="openshift-marketplace/certified-operators-ll47t" Nov 24 01:15:16 crc kubenswrapper[4755]: I1124 01:15:16.140974 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a1a2052-a1d2-4b6c-bb90-1c51c13499da-utilities\") pod \"community-operators-899vs\" (UID: 
\"9a1a2052-a1d2-4b6c-bb90-1c51c13499da\") " pod="openshift-marketplace/community-operators-899vs" Nov 24 01:15:16 crc kubenswrapper[4755]: I1124 01:15:16.140999 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a1a2052-a1d2-4b6c-bb90-1c51c13499da-catalog-content\") pod \"community-operators-899vs\" (UID: \"9a1a2052-a1d2-4b6c-bb90-1c51c13499da\") " pod="openshift-marketplace/community-operators-899vs" Nov 24 01:15:16 crc kubenswrapper[4755]: I1124 01:15:16.141403 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a1a2052-a1d2-4b6c-bb90-1c51c13499da-catalog-content\") pod \"community-operators-899vs\" (UID: \"9a1a2052-a1d2-4b6c-bb90-1c51c13499da\") " pod="openshift-marketplace/community-operators-899vs" Nov 24 01:15:16 crc kubenswrapper[4755]: E1124 01:15:16.141683 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 01:15:16.641668648 +0000 UTC m=+141.327734149 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vxz4v" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:16 crc kubenswrapper[4755]: I1124 01:15:16.142120 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a1a2052-a1d2-4b6c-bb90-1c51c13499da-utilities\") pod \"community-operators-899vs\" (UID: \"9a1a2052-a1d2-4b6c-bb90-1c51c13499da\") " pod="openshift-marketplace/community-operators-899vs" Nov 24 01:15:16 crc kubenswrapper[4755]: I1124 01:15:16.166351 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ll47t"] Nov 24 01:15:16 crc kubenswrapper[4755]: I1124 01:15:16.222511 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n5vxk\" (UniqueName: \"kubernetes.io/projected/9a1a2052-a1d2-4b6c-bb90-1c51c13499da-kube-api-access-n5vxk\") pod \"community-operators-899vs\" (UID: \"9a1a2052-a1d2-4b6c-bb90-1c51c13499da\") " pod="openshift-marketplace/community-operators-899vs" Nov 24 01:15:16 crc kubenswrapper[4755]: I1124 01:15:16.243206 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:15:16 crc kubenswrapper[4755]: E1124 01:15:16.243320 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:15:16.743277951 +0000 UTC m=+141.429343442 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:16 crc kubenswrapper[4755]: I1124 01:15:16.243506 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:16 crc kubenswrapper[4755]: I1124 01:15:16.245955 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/601c4d50-2427-46ff-adc3-69c6960d43c2-catalog-content\") pod \"certified-operators-ll47t\" (UID: \"601c4d50-2427-46ff-adc3-69c6960d43c2\") " pod="openshift-marketplace/certified-operators-ll47t" Nov 24 01:15:16 crc kubenswrapper[4755]: I1124 01:15:16.246071 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/601c4d50-2427-46ff-adc3-69c6960d43c2-utilities\") pod \"certified-operators-ll47t\" (UID: \"601c4d50-2427-46ff-adc3-69c6960d43c2\") " pod="openshift-marketplace/certified-operators-ll47t" Nov 24 01:15:16 crc kubenswrapper[4755]: E1124 01:15:16.247535 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 01:15:16.747522581 +0000 UTC m=+141.433588082 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vxz4v" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:16 crc kubenswrapper[4755]: I1124 01:15:16.248486 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/601c4d50-2427-46ff-adc3-69c6960d43c2-utilities\") pod \"certified-operators-ll47t\" (UID: \"601c4d50-2427-46ff-adc3-69c6960d43c2\") " pod="openshift-marketplace/certified-operators-ll47t" Nov 24 01:15:16 crc kubenswrapper[4755]: I1124 01:15:16.250417 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/601c4d50-2427-46ff-adc3-69c6960d43c2-catalog-content\") pod \"certified-operators-ll47t\" (UID: \"601c4d50-2427-46ff-adc3-69c6960d43c2\") " pod="openshift-marketplace/certified-operators-ll47t" Nov 24 01:15:16 crc kubenswrapper[4755]: I1124 01:15:16.251263 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lh4g2\" (UniqueName: \"kubernetes.io/projected/601c4d50-2427-46ff-adc3-69c6960d43c2-kube-api-access-lh4g2\") pod \"certified-operators-ll47t\" (UID: \"601c4d50-2427-46ff-adc3-69c6960d43c2\") " pod="openshift-marketplace/certified-operators-ll47t" Nov 24 01:15:16 crc kubenswrapper[4755]: I1124 01:15:16.295925 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lh4g2\" (UniqueName: \"kubernetes.io/projected/601c4d50-2427-46ff-adc3-69c6960d43c2-kube-api-access-lh4g2\") pod \"certified-operators-ll47t\" (UID: \"601c4d50-2427-46ff-adc3-69c6960d43c2\") " pod="openshift-marketplace/certified-operators-ll47t" Nov 24 01:15:16 crc kubenswrapper[4755]: I1124 01:15:16.323970 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-khbxb" Nov 24 01:15:16 crc kubenswrapper[4755]: I1124 01:15:16.353147 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:15:16 crc kubenswrapper[4755]: E1124 01:15:16.353702 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:15:16.853662722 +0000 UTC m=+141.539728233 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:16 crc kubenswrapper[4755]: I1124 01:15:16.376252 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-57wzn"] Nov 24 01:15:16 crc kubenswrapper[4755]: W1124 01:15:16.452005 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9806f604_0abe_4de1_af45_78232744bc87.slice/crio-b2a71abbfd85fbe88e9032b647b8ec4cb7f7a72be2e3cf44eb861e1334e25fc6 WatchSource:0}: Error finding container b2a71abbfd85fbe88e9032b647b8ec4cb7f7a72be2e3cf44eb861e1334e25fc6: Status 404 returned error can't find the container with id b2a71abbfd85fbe88e9032b647b8ec4cb7f7a72be2e3cf44eb861e1334e25fc6 Nov 24 01:15:16 crc kubenswrapper[4755]: I1124 01:15:16.455384 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:16 crc kubenswrapper[4755]: E1124 01:15:16.455916 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 01:15:16.955899605 +0000 UTC m=+141.641965106 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vxz4v" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:16 crc kubenswrapper[4755]: I1124 01:15:16.485817 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ll47t" Nov 24 01:15:16 crc kubenswrapper[4755]: I1124 01:15:16.497285 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-899vs" Nov 24 01:15:16 crc kubenswrapper[4755]: I1124 01:15:16.539725 4755 patch_prober.go:28] interesting pod/router-default-5444994796-nfhp7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 24 01:15:16 crc kubenswrapper[4755]: [-]has-synced failed: reason withheld Nov 24 01:15:16 crc kubenswrapper[4755]: [+]process-running ok Nov 24 01:15:16 crc kubenswrapper[4755]: healthz check failed Nov 24 01:15:16 crc kubenswrapper[4755]: I1124 01:15:16.539784 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nfhp7" podUID="b2977dd9-b5c2-40cd-bcd7-12f91d2edf31" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 24 01:15:16 crc kubenswrapper[4755]: I1124 01:15:16.557174 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:15:16 crc kubenswrapper[4755]: E1124 01:15:16.557922 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:15:17.057902019 +0000 UTC m=+141.743967510 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:16 crc kubenswrapper[4755]: I1124 01:15:16.664270 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:16 crc kubenswrapper[4755]: E1124 01:15:16.664628 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 01:15:17.164617168 +0000 UTC m=+141.850682669 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vxz4v" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:16 crc kubenswrapper[4755]: I1124 01:15:16.724904 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-khbxb"] Nov 24 01:15:16 crc kubenswrapper[4755]: I1124 01:15:16.765544 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:15:16 crc kubenswrapper[4755]: E1124 01:15:16.766441 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:15:17.266421767 +0000 UTC m=+141.952487268 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:16 crc kubenswrapper[4755]: W1124 01:15:16.800652 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb1028f34_b287_4775_a138_1ccdae47b7ee.slice/crio-1f8b7990f781049540a44ffed8399b00a9cdbdc6a5c880072e2158a2fce8643b WatchSource:0}: Error finding container 1f8b7990f781049540a44ffed8399b00a9cdbdc6a5c880072e2158a2fce8643b: Status 404 returned error can't find the container with id 1f8b7990f781049540a44ffed8399b00a9cdbdc6a5c880072e2158a2fce8643b Nov 24 01:15:16 crc kubenswrapper[4755]: I1124 01:15:16.870516 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:16 crc kubenswrapper[4755]: E1124 01:15:16.871945 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 01:15:17.370963549 +0000 UTC m=+142.057029050 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vxz4v" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:16 crc kubenswrapper[4755]: I1124 01:15:16.900677 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-899vs"] Nov 24 01:15:16 crc kubenswrapper[4755]: E1124 01:15:16.931084 4755 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode6623318_d2a9_4015_b310_96a7506f61f9.slice/crio-1b41606d191efd72dfabdf8a9615d0ad8b4816b30272819a227c28f1e9382fa3.scope\": RecentStats: unable to find data in memory cache]" Nov 24 01:15:16 crc kubenswrapper[4755]: I1124 01:15:16.953040 4755 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Nov 24 01:15:16 crc kubenswrapper[4755]: I1124 01:15:16.977104 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:15:16 crc kubenswrapper[4755]: E1124 01:15:16.977272 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:15:17.477248825 +0000 UTC m=+142.163314326 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:16 crc kubenswrapper[4755]: I1124 01:15:16.977387 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:16 crc kubenswrapper[4755]: E1124 01:15:16.977833 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 01:15:17.477826143 +0000 UTC m=+142.163891644 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vxz4v" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:17 crc kubenswrapper[4755]: I1124 01:15:17.078057 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:15:17 crc kubenswrapper[4755]: E1124 01:15:17.078225 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:15:17.578183698 +0000 UTC m=+142.264249199 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:17 crc kubenswrapper[4755]: I1124 01:15:17.078381 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:17 crc kubenswrapper[4755]: E1124 01:15:17.078872 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 01:15:17.578859008 +0000 UTC m=+142.264924509 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vxz4v" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:17 crc kubenswrapper[4755]: I1124 01:15:17.179970 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:15:17 crc kubenswrapper[4755]: E1124 01:15:17.180356 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 01:15:17.680337277 +0000 UTC m=+142.366402788 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 01:15:17 crc kubenswrapper[4755]: I1124 01:15:17.198079 4755 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-11-24T01:15:16.953065667Z","Handler":null,"Name":""} Nov 24 01:15:17 crc kubenswrapper[4755]: I1124 01:15:17.201489 4755 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Nov 24 01:15:17 crc kubenswrapper[4755]: I1124 01:15:17.201530 4755 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Nov 24 01:15:17 crc kubenswrapper[4755]: I1124 01:15:17.207803 4755 generic.go:334] "Generic (PLEG): container finished" podID="b1028f34-b287-4775-a138-1ccdae47b7ee" containerID="71404116f0aac7891b416dc2ef5d2d07449ed6aa353664c088d57fabdb7cb330" exitCode=0 Nov 24 01:15:17 crc kubenswrapper[4755]: I1124 01:15:17.207901 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-khbxb" event={"ID":"b1028f34-b287-4775-a138-1ccdae47b7ee","Type":"ContainerDied","Data":"71404116f0aac7891b416dc2ef5d2d07449ed6aa353664c088d57fabdb7cb330"} Nov 24 01:15:17 crc kubenswrapper[4755]: I1124 01:15:17.207929 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-khbxb" event={"ID":"b1028f34-b287-4775-a138-1ccdae47b7ee","Type":"ContainerStarted","Data":"1f8b7990f781049540a44ffed8399b00a9cdbdc6a5c880072e2158a2fce8643b"} Nov 24 01:15:17 crc kubenswrapper[4755]: I1124 01:15:17.209954 4755 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider 
Nov 24 01:15:17 crc kubenswrapper[4755]: I1124 01:15:17.210860 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ll47t"] Nov 24 01:15:17 crc kubenswrapper[4755]: I1124 01:15:17.229192 4755 generic.go:334] "Generic (PLEG): container finished" podID="9a1a2052-a1d2-4b6c-bb90-1c51c13499da" containerID="f29a6b2c26040eb54bdafc4632714b5877df1476e0bf076283d587c8542cf5a2" exitCode=0 Nov 24 01:15:17 crc kubenswrapper[4755]: I1124 01:15:17.229418 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-899vs" event={"ID":"9a1a2052-a1d2-4b6c-bb90-1c51c13499da","Type":"ContainerDied","Data":"f29a6b2c26040eb54bdafc4632714b5877df1476e0bf076283d587c8542cf5a2"} Nov 24 01:15:17 crc kubenswrapper[4755]: I1124 01:15:17.229470 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-899vs" event={"ID":"9a1a2052-a1d2-4b6c-bb90-1c51c13499da","Type":"ContainerStarted","Data":"2c83a83b4c4305d34c480212296cb79d14011c92e7799bd594a9d111ad1458a2"} Nov 24 01:15:17 crc kubenswrapper[4755]: I1124 01:15:17.239991 4755 generic.go:334] "Generic (PLEG): container finished" podID="e6623318-d2a9-4015-b310-96a7506f61f9" containerID="1b41606d191efd72dfabdf8a9615d0ad8b4816b30272819a227c28f1e9382fa3" exitCode=0 Nov 24 01:15:17 crc kubenswrapper[4755]: I1124 01:15:17.240106 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29399115-rt4z5" event={"ID":"e6623318-d2a9-4015-b310-96a7506f61f9","Type":"ContainerDied","Data":"1b41606d191efd72dfabdf8a9615d0ad8b4816b30272819a227c28f1e9382fa3"} Nov 24 01:15:17 crc kubenswrapper[4755]: I1124 01:15:17.248139 4755 generic.go:334] "Generic (PLEG): container finished" podID="9806f604-0abe-4de1-af45-78232744bc87" containerID="e88f092a227be01624d30cf2bac69784b424a7e86cc6e00992ba2fff644ecb84" exitCode=0 Nov 24 01:15:17 crc kubenswrapper[4755]: I1124 01:15:17.248203 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-57wzn" event={"ID":"9806f604-0abe-4de1-af45-78232744bc87","Type":"ContainerDied","Data":"e88f092a227be01624d30cf2bac69784b424a7e86cc6e00992ba2fff644ecb84"} Nov 24 01:15:17 crc kubenswrapper[4755]: I1124 01:15:17.248399 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-57wzn" event={"ID":"9806f604-0abe-4de1-af45-78232744bc87","Type":"ContainerStarted","Data":"b2a71abbfd85fbe88e9032b647b8ec4cb7f7a72be2e3cf44eb861e1334e25fc6"} Nov 24 01:15:17 crc kubenswrapper[4755]: I1124 01:15:17.262242 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-mqx68" event={"ID":"b0af14ce-aaea-427f-bf89-15c1f4090ee7","Type":"ContainerStarted","Data":"e0c324403e528cff19214d9b831654db0f629e289186d9f1c2546a63943c5ef6"} Nov 24 01:15:17 crc kubenswrapper[4755]: I1124 01:15:17.262444 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-mqx68" event={"ID":"b0af14ce-aaea-427f-bf89-15c1f4090ee7","Type":"ContainerStarted","Data":"3ae49cc4b57ab287a68f69c41f77afe45002e9a52acfd998cc185f1da081211e"} Nov 24 01:15:17 crc kubenswrapper[4755]: I1124 01:15:17.286228 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:17 crc kubenswrapper[4755]: I1124 01:15:17.294070 4755 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Nov 24 01:15:17 crc kubenswrapper[4755]: I1124 01:15:17.294296 4755 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:17 crc kubenswrapper[4755]: I1124 01:15:17.325806 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vxz4v\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:17 crc kubenswrapper[4755]: I1124 01:15:17.388626 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 01:15:17 crc kubenswrapper[4755]: I1124 01:15:17.390170 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-hqh6f" Nov 24 01:15:17 crc kubenswrapper[4755]: I1124 01:15:17.401862 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Nov 24 01:15:17 crc kubenswrapper[4755]: I1124 01:15:17.529543 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:17 crc kubenswrapper[4755]: I1124 01:15:17.538729 4755 patch_prober.go:28] interesting pod/router-default-5444994796-nfhp7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 24 01:15:17 crc kubenswrapper[4755]: [-]has-synced failed: reason withheld Nov 24 01:15:17 crc kubenswrapper[4755]: [+]process-running ok Nov 24 01:15:17 crc kubenswrapper[4755]: healthz check failed Nov 24 01:15:17 crc kubenswrapper[4755]: I1124 01:15:17.538812 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nfhp7" podUID="b2977dd9-b5c2-40cd-bcd7-12f91d2edf31" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 24 01:15:17 crc kubenswrapper[4755]: I1124 01:15:17.675022 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-fdzng"] Nov 24 01:15:17 crc kubenswrapper[4755]: I1124 01:15:17.676307 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fdzng" Nov 24 01:15:17 crc kubenswrapper[4755]: I1124 01:15:17.682931 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Nov 24 01:15:17 crc kubenswrapper[4755]: I1124 01:15:17.691348 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fdzng"] Nov 24 01:15:17 crc kubenswrapper[4755]: I1124 01:15:17.794653 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t56kr\" (UniqueName: \"kubernetes.io/projected/21501e82-61af-435e-97f5-767cb357cbfb-kube-api-access-t56kr\") pod \"redhat-marketplace-fdzng\" (UID: \"21501e82-61af-435e-97f5-767cb357cbfb\") " pod="openshift-marketplace/redhat-marketplace-fdzng" Nov 24 01:15:17 crc kubenswrapper[4755]: I1124 01:15:17.794821 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/21501e82-61af-435e-97f5-767cb357cbfb-utilities\") pod \"redhat-marketplace-fdzng\" (UID: \"21501e82-61af-435e-97f5-767cb357cbfb\") " pod="openshift-marketplace/redhat-marketplace-fdzng" Nov 24 01:15:17 crc kubenswrapper[4755]: I1124 01:15:17.794930 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/21501e82-61af-435e-97f5-767cb357cbfb-catalog-content\") pod \"redhat-marketplace-fdzng\" (UID: \"21501e82-61af-435e-97f5-767cb357cbfb\") " pod="openshift-marketplace/redhat-marketplace-fdzng" Nov 24 01:15:17 crc kubenswrapper[4755]: I1124 01:15:17.895846 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/21501e82-61af-435e-97f5-767cb357cbfb-catalog-content\") pod \"redhat-marketplace-fdzng\" (UID: \"21501e82-61af-435e-97f5-767cb357cbfb\") " pod="openshift-marketplace/redhat-marketplace-fdzng" Nov 24 01:15:17 crc kubenswrapper[4755]: I1124 01:15:17.895999 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t56kr\" (UniqueName: \"kubernetes.io/projected/21501e82-61af-435e-97f5-767cb357cbfb-kube-api-access-t56kr\") pod 
\"redhat-marketplace-fdzng\" (UID: \"21501e82-61af-435e-97f5-767cb357cbfb\") " pod="openshift-marketplace/redhat-marketplace-fdzng" Nov 24 01:15:17 crc kubenswrapper[4755]: I1124 01:15:17.896059 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/21501e82-61af-435e-97f5-767cb357cbfb-utilities\") pod \"redhat-marketplace-fdzng\" (UID: \"21501e82-61af-435e-97f5-767cb357cbfb\") " pod="openshift-marketplace/redhat-marketplace-fdzng" Nov 24 01:15:17 crc kubenswrapper[4755]: I1124 01:15:17.897439 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/21501e82-61af-435e-97f5-767cb357cbfb-utilities\") pod \"redhat-marketplace-fdzng\" (UID: \"21501e82-61af-435e-97f5-767cb357cbfb\") " pod="openshift-marketplace/redhat-marketplace-fdzng" Nov 24 01:15:17 crc kubenswrapper[4755]: I1124 01:15:17.899020 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/21501e82-61af-435e-97f5-767cb357cbfb-catalog-content\") pod \"redhat-marketplace-fdzng\" (UID: \"21501e82-61af-435e-97f5-767cb357cbfb\") " pod="openshift-marketplace/redhat-marketplace-fdzng" Nov 24 01:15:17 crc kubenswrapper[4755]: I1124 01:15:17.924127 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t56kr\" (UniqueName: \"kubernetes.io/projected/21501e82-61af-435e-97f5-767cb357cbfb-kube-api-access-t56kr\") pod \"redhat-marketplace-fdzng\" (UID: \"21501e82-61af-435e-97f5-767cb357cbfb\") " pod="openshift-marketplace/redhat-marketplace-fdzng" Nov 24 01:15:17 crc kubenswrapper[4755]: I1124 01:15:17.981808 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-vxz4v"] Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.013570 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.014816 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fdzng" Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.082312 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-9nxdv"] Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.083518 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9nxdv" Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.097443 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9nxdv"] Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.201265 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0f48fab-6749-416a-b712-c1dcb42b45d1-catalog-content\") pod \"redhat-marketplace-9nxdv\" (UID: \"b0f48fab-6749-416a-b712-c1dcb42b45d1\") " pod="openshift-marketplace/redhat-marketplace-9nxdv" Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.201315 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2bfk7\" (UniqueName: \"kubernetes.io/projected/b0f48fab-6749-416a-b712-c1dcb42b45d1-kube-api-access-2bfk7\") pod \"redhat-marketplace-9nxdv\" (UID: \"b0f48fab-6749-416a-b712-c1dcb42b45d1\") " pod="openshift-marketplace/redhat-marketplace-9nxdv" Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.201803 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0f48fab-6749-416a-b712-c1dcb42b45d1-utilities\") pod \"redhat-marketplace-9nxdv\" (UID: \"b0f48fab-6749-416a-b712-c1dcb42b45d1\") " pod="openshift-marketplace/redhat-marketplace-9nxdv" Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.300917 4755 generic.go:334] "Generic (PLEG): container finished" podID="601c4d50-2427-46ff-adc3-69c6960d43c2" containerID="9c12c724b641079f8d6a3fd5cda733a9bf101ea5c962843af955b5f0399f567a" exitCode=0 Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.301103 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ll47t" event={"ID":"601c4d50-2427-46ff-adc3-69c6960d43c2","Type":"ContainerDied","Data":"9c12c724b641079f8d6a3fd5cda733a9bf101ea5c962843af955b5f0399f567a"} Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.301208 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ll47t" event={"ID":"601c4d50-2427-46ff-adc3-69c6960d43c2","Type":"ContainerStarted","Data":"f1e4d297e2cb065dc4363fe1323b77414d9a1559128e3b6aac93fb87dbb3e3ea"} Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.302439 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0f48fab-6749-416a-b712-c1dcb42b45d1-utilities\") pod \"redhat-marketplace-9nxdv\" (UID: \"b0f48fab-6749-416a-b712-c1dcb42b45d1\") " pod="openshift-marketplace/redhat-marketplace-9nxdv" Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.302494 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0f48fab-6749-416a-b712-c1dcb42b45d1-catalog-content\") pod \"redhat-marketplace-9nxdv\" (UID: \"b0f48fab-6749-416a-b712-c1dcb42b45d1\") " pod="openshift-marketplace/redhat-marketplace-9nxdv" Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.302523 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2bfk7\" (UniqueName: \"kubernetes.io/projected/b0f48fab-6749-416a-b712-c1dcb42b45d1-kube-api-access-2bfk7\") pod \"redhat-marketplace-9nxdv\" (UID: \"b0f48fab-6749-416a-b712-c1dcb42b45d1\") " 
pod="openshift-marketplace/redhat-marketplace-9nxdv" Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.303536 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0f48fab-6749-416a-b712-c1dcb42b45d1-catalog-content\") pod \"redhat-marketplace-9nxdv\" (UID: \"b0f48fab-6749-416a-b712-c1dcb42b45d1\") " pod="openshift-marketplace/redhat-marketplace-9nxdv" Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.303647 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0f48fab-6749-416a-b712-c1dcb42b45d1-utilities\") pod \"redhat-marketplace-9nxdv\" (UID: \"b0f48fab-6749-416a-b712-c1dcb42b45d1\") " pod="openshift-marketplace/redhat-marketplace-9nxdv" Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.321839 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-mqx68" event={"ID":"b0af14ce-aaea-427f-bf89-15c1f4090ee7","Type":"ContainerStarted","Data":"0fd5de2ce190075503169662e74010c9e6badd96d4cd55d9de75d78767bf6ef0"} Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.325298 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2bfk7\" (UniqueName: \"kubernetes.io/projected/b0f48fab-6749-416a-b712-c1dcb42b45d1-kube-api-access-2bfk7\") pod \"redhat-marketplace-9nxdv\" (UID: \"b0f48fab-6749-416a-b712-c1dcb42b45d1\") " pod="openshift-marketplace/redhat-marketplace-9nxdv" Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.327392 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" event={"ID":"2ca81542-2eef-4099-92bd-301845e4d3c8","Type":"ContainerStarted","Data":"22cda212408d1d60b8a969141e808666097cf87d6372dab54547d6b93ea312f4"} Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.366384 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-mqx68" podStartSLOduration=11.366349557 podStartE2EDuration="11.366349557s" podCreationTimestamp="2025-11-24 01:15:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:15:18.343822279 +0000 UTC m=+143.029887790" watchObservedRunningTime="2025-11-24 01:15:18.366349557 +0000 UTC m=+143.052415058" Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.377012 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fdzng"] Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.378480 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" podStartSLOduration=123.378432116 podStartE2EDuration="2m3.378432116s" podCreationTimestamp="2025-11-24 01:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:15:18.367823382 +0000 UTC m=+143.053888883" watchObservedRunningTime="2025-11-24 01:15:18.378432116 +0000 UTC m=+143.064497617" Nov 24 01:15:18 crc kubenswrapper[4755]: W1124 01:15:18.382088 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod21501e82_61af_435e_97f5_767cb357cbfb.slice/crio-79126ab9b84e434e46e2c88609ad7bbe2b998ea393eccb7db51a806185eff09c WatchSource:0}: Error finding 
container 79126ab9b84e434e46e2c88609ad7bbe2b998ea393eccb7db51a806185eff09c: Status 404 returned error can't find the container with id 79126ab9b84e434e46e2c88609ad7bbe2b998ea393eccb7db51a806185eff09c Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.416358 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9nxdv" Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.471942 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.473129 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.477393 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.477553 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.492300 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.510127 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/812e4b1b-8992-4f05-ba15-fdcc58c5bf7a-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"812e4b1b-8992-4f05-ba15-fdcc58c5bf7a\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.510192 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/812e4b1b-8992-4f05-ba15-fdcc58c5bf7a-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"812e4b1b-8992-4f05-ba15-fdcc58c5bf7a\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.537407 4755 patch_prober.go:28] interesting pod/router-default-5444994796-nfhp7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 24 01:15:18 crc kubenswrapper[4755]: [-]has-synced failed: reason withheld Nov 24 01:15:18 crc kubenswrapper[4755]: [+]process-running ok Nov 24 01:15:18 crc kubenswrapper[4755]: healthz check failed Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.537478 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nfhp7" podUID="b2977dd9-b5c2-40cd-bcd7-12f91d2edf31" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.583332 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29399115-rt4z5" Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.618991 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rx2cg\" (UniqueName: \"kubernetes.io/projected/e6623318-d2a9-4015-b310-96a7506f61f9-kube-api-access-rx2cg\") pod \"e6623318-d2a9-4015-b310-96a7506f61f9\" (UID: \"e6623318-d2a9-4015-b310-96a7506f61f9\") " Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.619237 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e6623318-d2a9-4015-b310-96a7506f61f9-config-volume\") pod \"e6623318-d2a9-4015-b310-96a7506f61f9\" (UID: \"e6623318-d2a9-4015-b310-96a7506f61f9\") " Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.619295 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e6623318-d2a9-4015-b310-96a7506f61f9-secret-volume\") pod \"e6623318-d2a9-4015-b310-96a7506f61f9\" (UID: \"e6623318-d2a9-4015-b310-96a7506f61f9\") " Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.620063 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/812e4b1b-8992-4f05-ba15-fdcc58c5bf7a-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"812e4b1b-8992-4f05-ba15-fdcc58c5bf7a\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.621206 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/812e4b1b-8992-4f05-ba15-fdcc58c5bf7a-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"812e4b1b-8992-4f05-ba15-fdcc58c5bf7a\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.621396 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/812e4b1b-8992-4f05-ba15-fdcc58c5bf7a-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"812e4b1b-8992-4f05-ba15-fdcc58c5bf7a\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.622469 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e6623318-d2a9-4015-b310-96a7506f61f9-config-volume" (OuterVolumeSpecName: "config-volume") pod "e6623318-d2a9-4015-b310-96a7506f61f9" (UID: "e6623318-d2a9-4015-b310-96a7506f61f9"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.636721 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6623318-d2a9-4015-b310-96a7506f61f9-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "e6623318-d2a9-4015-b310-96a7506f61f9" (UID: "e6623318-d2a9-4015-b310-96a7506f61f9"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.636878 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6623318-d2a9-4015-b310-96a7506f61f9-kube-api-access-rx2cg" (OuterVolumeSpecName: "kube-api-access-rx2cg") pod "e6623318-d2a9-4015-b310-96a7506f61f9" (UID: "e6623318-d2a9-4015-b310-96a7506f61f9"). InnerVolumeSpecName "kube-api-access-rx2cg". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.640299 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/812e4b1b-8992-4f05-ba15-fdcc58c5bf7a-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"812e4b1b-8992-4f05-ba15-fdcc58c5bf7a\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.682041 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-rm9z9"] Nov 24 01:15:18 crc kubenswrapper[4755]: E1124 01:15:18.682457 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6623318-d2a9-4015-b310-96a7506f61f9" containerName="collect-profiles" Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.682477 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6623318-d2a9-4015-b310-96a7506f61f9" containerName="collect-profiles" Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.682831 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6623318-d2a9-4015-b310-96a7506f61f9" containerName="collect-profiles" Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.684563 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rm9z9" Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.690907 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.695386 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rm9z9"] Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.722461 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c115205-515a-4f53-96ec-8559f1744b9b-catalog-content\") pod \"redhat-operators-rm9z9\" (UID: \"1c115205-515a-4f53-96ec-8559f1744b9b\") " pod="openshift-marketplace/redhat-operators-rm9z9" Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.722554 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c115205-515a-4f53-96ec-8559f1744b9b-utilities\") pod \"redhat-operators-rm9z9\" (UID: \"1c115205-515a-4f53-96ec-8559f1744b9b\") " pod="openshift-marketplace/redhat-operators-rm9z9" Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.722618 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7phb4\" (UniqueName: \"kubernetes.io/projected/1c115205-515a-4f53-96ec-8559f1744b9b-kube-api-access-7phb4\") pod \"redhat-operators-rm9z9\" (UID: \"1c115205-515a-4f53-96ec-8559f1744b9b\") " pod="openshift-marketplace/redhat-operators-rm9z9" Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.722695 4755 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e6623318-d2a9-4015-b310-96a7506f61f9-config-volume\") on node \"crc\" DevicePath \"\"" Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.722709 4755 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e6623318-d2a9-4015-b310-96a7506f61f9-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.722720 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rx2cg\" (UniqueName: \"kubernetes.io/projected/e6623318-d2a9-4015-b310-96a7506f61f9-kube-api-access-rx2cg\") on node \"crc\" DevicePath \"\"" Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.822530 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.823597 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c115205-515a-4f53-96ec-8559f1744b9b-utilities\") pod \"redhat-operators-rm9z9\" (UID: \"1c115205-515a-4f53-96ec-8559f1744b9b\") " pod="openshift-marketplace/redhat-operators-rm9z9" Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.823674 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7phb4\" (UniqueName: \"kubernetes.io/projected/1c115205-515a-4f53-96ec-8559f1744b9b-kube-api-access-7phb4\") pod \"redhat-operators-rm9z9\" (UID: \"1c115205-515a-4f53-96ec-8559f1744b9b\") " pod="openshift-marketplace/redhat-operators-rm9z9" Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.823741 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c115205-515a-4f53-96ec-8559f1744b9b-catalog-content\") pod \"redhat-operators-rm9z9\" (UID: \"1c115205-515a-4f53-96ec-8559f1744b9b\") " pod="openshift-marketplace/redhat-operators-rm9z9" Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.824140 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c115205-515a-4f53-96ec-8559f1744b9b-catalog-content\") pod \"redhat-operators-rm9z9\" (UID: \"1c115205-515a-4f53-96ec-8559f1744b9b\") " pod="openshift-marketplace/redhat-operators-rm9z9" Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.824343 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c115205-515a-4f53-96ec-8559f1744b9b-utilities\") pod \"redhat-operators-rm9z9\" (UID: \"1c115205-515a-4f53-96ec-8559f1744b9b\") " pod="openshift-marketplace/redhat-operators-rm9z9" Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.853414 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7phb4\" (UniqueName: \"kubernetes.io/projected/1c115205-515a-4f53-96ec-8559f1744b9b-kube-api-access-7phb4\") pod \"redhat-operators-rm9z9\" (UID: \"1c115205-515a-4f53-96ec-8559f1744b9b\") " pod="openshift-marketplace/redhat-operators-rm9z9" Nov 24 01:15:18 crc kubenswrapper[4755]: I1124 01:15:18.960933 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9nxdv"] Nov 24 01:15:19 crc kubenswrapper[4755]: I1124 01:15:19.018104 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rm9z9" Nov 24 01:15:19 crc kubenswrapper[4755]: I1124 01:15:19.083975 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-4879p"] Nov 24 01:15:19 crc kubenswrapper[4755]: I1124 01:15:19.085278 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4879p" Nov 24 01:15:19 crc kubenswrapper[4755]: I1124 01:15:19.096074 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4879p"] Nov 24 01:15:19 crc kubenswrapper[4755]: I1124 01:15:19.133045 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78f7ae95-68af-4e1d-8d97-8e675d6d3323-catalog-content\") pod \"redhat-operators-4879p\" (UID: \"78f7ae95-68af-4e1d-8d97-8e675d6d3323\") " pod="openshift-marketplace/redhat-operators-4879p" Nov 24 01:15:19 crc kubenswrapper[4755]: I1124 01:15:19.133095 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78f7ae95-68af-4e1d-8d97-8e675d6d3323-utilities\") pod \"redhat-operators-4879p\" (UID: \"78f7ae95-68af-4e1d-8d97-8e675d6d3323\") " pod="openshift-marketplace/redhat-operators-4879p" Nov 24 01:15:19 crc kubenswrapper[4755]: I1124 01:15:19.133448 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pf299\" (UniqueName: \"kubernetes.io/projected/78f7ae95-68af-4e1d-8d97-8e675d6d3323-kube-api-access-pf299\") pod \"redhat-operators-4879p\" (UID: \"78f7ae95-68af-4e1d-8d97-8e675d6d3323\") " pod="openshift-marketplace/redhat-operators-4879p" Nov 24 01:15:19 crc kubenswrapper[4755]: I1124 01:15:19.168475 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Nov 24 01:15:19 crc kubenswrapper[4755]: W1124 01:15:19.195977 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod812e4b1b_8992_4f05_ba15_fdcc58c5bf7a.slice/crio-ea9121662ae14b9db349202a6c579dfc43bebd40976af3e9b3cbc0f39282a74e WatchSource:0}: Error finding container ea9121662ae14b9db349202a6c579dfc43bebd40976af3e9b3cbc0f39282a74e: Status 404 returned error can't find the container with id ea9121662ae14b9db349202a6c579dfc43bebd40976af3e9b3cbc0f39282a74e Nov 24 01:15:19 crc kubenswrapper[4755]: I1124 01:15:19.229740 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rm9z9"] Nov 24 01:15:19 crc kubenswrapper[4755]: I1124 01:15:19.235534 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pf299\" (UniqueName: \"kubernetes.io/projected/78f7ae95-68af-4e1d-8d97-8e675d6d3323-kube-api-access-pf299\") pod \"redhat-operators-4879p\" (UID: \"78f7ae95-68af-4e1d-8d97-8e675d6d3323\") " pod="openshift-marketplace/redhat-operators-4879p" Nov 24 01:15:19 crc kubenswrapper[4755]: I1124 01:15:19.235641 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78f7ae95-68af-4e1d-8d97-8e675d6d3323-catalog-content\") pod \"redhat-operators-4879p\" (UID: \"78f7ae95-68af-4e1d-8d97-8e675d6d3323\") " pod="openshift-marketplace/redhat-operators-4879p" Nov 24 01:15:19 crc kubenswrapper[4755]: I1124 01:15:19.235663 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78f7ae95-68af-4e1d-8d97-8e675d6d3323-utilities\") pod \"redhat-operators-4879p\" (UID: \"78f7ae95-68af-4e1d-8d97-8e675d6d3323\") " pod="openshift-marketplace/redhat-operators-4879p" Nov 24 01:15:19 crc kubenswrapper[4755]: I1124 
01:15:19.236197 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78f7ae95-68af-4e1d-8d97-8e675d6d3323-catalog-content\") pod \"redhat-operators-4879p\" (UID: \"78f7ae95-68af-4e1d-8d97-8e675d6d3323\") " pod="openshift-marketplace/redhat-operators-4879p" Nov 24 01:15:19 crc kubenswrapper[4755]: I1124 01:15:19.236216 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78f7ae95-68af-4e1d-8d97-8e675d6d3323-utilities\") pod \"redhat-operators-4879p\" (UID: \"78f7ae95-68af-4e1d-8d97-8e675d6d3323\") " pod="openshift-marketplace/redhat-operators-4879p" Nov 24 01:15:19 crc kubenswrapper[4755]: W1124 01:15:19.238375 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1c115205_515a_4f53_96ec_8559f1744b9b.slice/crio-1976d6b561a993d495abb231fe3e74fc12ea48be71a316524125b5f9fd22e681 WatchSource:0}: Error finding container 1976d6b561a993d495abb231fe3e74fc12ea48be71a316524125b5f9fd22e681: Status 404 returned error can't find the container with id 1976d6b561a993d495abb231fe3e74fc12ea48be71a316524125b5f9fd22e681 Nov 24 01:15:19 crc kubenswrapper[4755]: I1124 01:15:19.258520 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pf299\" (UniqueName: \"kubernetes.io/projected/78f7ae95-68af-4e1d-8d97-8e675d6d3323-kube-api-access-pf299\") pod \"redhat-operators-4879p\" (UID: \"78f7ae95-68af-4e1d-8d97-8e675d6d3323\") " pod="openshift-marketplace/redhat-operators-4879p" Nov 24 01:15:19 crc kubenswrapper[4755]: I1124 01:15:19.296843 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-vmwlz" Nov 24 01:15:19 crc kubenswrapper[4755]: I1124 01:15:19.300935 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-vmwlz" Nov 24 01:15:19 crc kubenswrapper[4755]: I1124 01:15:19.359723 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-mlls8" Nov 24 01:15:19 crc kubenswrapper[4755]: I1124 01:15:19.360518 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-mlls8" Nov 24 01:15:19 crc kubenswrapper[4755]: I1124 01:15:19.381677 4755 patch_prober.go:28] interesting pod/console-f9d7485db-mlls8 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.12:8443/health\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body= Nov 24 01:15:19 crc kubenswrapper[4755]: I1124 01:15:19.381745 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-mlls8" podUID="65c5f11b-931e-4dc2-8c3e-c7180b94ec08" containerName="console" probeResult="failure" output="Get \"https://10.217.0.12:8443/health\": dial tcp 10.217.0.12:8443: connect: connection refused" Nov 24 01:15:19 crc kubenswrapper[4755]: I1124 01:15:19.385645 4755 generic.go:334] "Generic (PLEG): container finished" podID="b0f48fab-6749-416a-b712-c1dcb42b45d1" containerID="172d89992728256a9ad8d8dd2d270f315d9e45aa30a1a2ef518754bb23e6a793" exitCode=0 Nov 24 01:15:19 crc kubenswrapper[4755]: I1124 01:15:19.385973 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9nxdv" 
event={"ID":"b0f48fab-6749-416a-b712-c1dcb42b45d1","Type":"ContainerDied","Data":"172d89992728256a9ad8d8dd2d270f315d9e45aa30a1a2ef518754bb23e6a793"} Nov 24 01:15:19 crc kubenswrapper[4755]: I1124 01:15:19.386030 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9nxdv" event={"ID":"b0f48fab-6749-416a-b712-c1dcb42b45d1","Type":"ContainerStarted","Data":"68609947ed03e2430a16e4dbb0b77fc50d82dc8e3e0768a7f4b5af07cb854c0c"} Nov 24 01:15:19 crc kubenswrapper[4755]: I1124 01:15:19.404327 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4879p" Nov 24 01:15:19 crc kubenswrapper[4755]: I1124 01:15:19.451312 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rm9z9" event={"ID":"1c115205-515a-4f53-96ec-8559f1744b9b","Type":"ContainerStarted","Data":"1976d6b561a993d495abb231fe3e74fc12ea48be71a316524125b5f9fd22e681"} Nov 24 01:15:19 crc kubenswrapper[4755]: I1124 01:15:19.453693 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"812e4b1b-8992-4f05-ba15-fdcc58c5bf7a","Type":"ContainerStarted","Data":"ea9121662ae14b9db349202a6c579dfc43bebd40976af3e9b3cbc0f39282a74e"} Nov 24 01:15:19 crc kubenswrapper[4755]: I1124 01:15:19.465901 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" event={"ID":"2ca81542-2eef-4099-92bd-301845e4d3c8","Type":"ContainerStarted","Data":"bf781a8377cbb3022847bfee6df8781331b9f904d9313a0e301bc7b29fe40f65"} Nov 24 01:15:19 crc kubenswrapper[4755]: I1124 01:15:19.466631 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:19 crc kubenswrapper[4755]: I1124 01:15:19.471301 4755 generic.go:334] "Generic (PLEG): container finished" podID="21501e82-61af-435e-97f5-767cb357cbfb" containerID="60c06db75637535737eefec23338f4be773b19212290af9fcd95e6496750a915" exitCode=0 Nov 24 01:15:19 crc kubenswrapper[4755]: I1124 01:15:19.471357 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fdzng" event={"ID":"21501e82-61af-435e-97f5-767cb357cbfb","Type":"ContainerDied","Data":"60c06db75637535737eefec23338f4be773b19212290af9fcd95e6496750a915"} Nov 24 01:15:19 crc kubenswrapper[4755]: I1124 01:15:19.471378 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fdzng" event={"ID":"21501e82-61af-435e-97f5-767cb357cbfb","Type":"ContainerStarted","Data":"79126ab9b84e434e46e2c88609ad7bbe2b998ea393eccb7db51a806185eff09c"} Nov 24 01:15:19 crc kubenswrapper[4755]: I1124 01:15:19.482473 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29399115-rt4z5" event={"ID":"e6623318-d2a9-4015-b310-96a7506f61f9","Type":"ContainerDied","Data":"efc99f8231508c8fb7a853d1b90f8a1d4696d7f826c058d4f0d9b4800a23b81e"} Nov 24 01:15:19 crc kubenswrapper[4755]: I1124 01:15:19.482714 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="efc99f8231508c8fb7a853d1b90f8a1d4696d7f826c058d4f0d9b4800a23b81e" Nov 24 01:15:19 crc kubenswrapper[4755]: I1124 01:15:19.482823 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29399115-rt4z5" Nov 24 01:15:19 crc kubenswrapper[4755]: I1124 01:15:19.538214 4755 patch_prober.go:28] interesting pod/router-default-5444994796-nfhp7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 24 01:15:19 crc kubenswrapper[4755]: [-]has-synced failed: reason withheld Nov 24 01:15:19 crc kubenswrapper[4755]: [+]process-running ok Nov 24 01:15:19 crc kubenswrapper[4755]: healthz check failed Nov 24 01:15:19 crc kubenswrapper[4755]: I1124 01:15:19.538279 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nfhp7" podUID="b2977dd9-b5c2-40cd-bcd7-12f91d2edf31" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 24 01:15:19 crc kubenswrapper[4755]: I1124 01:15:19.562995 4755 patch_prober.go:28] interesting pod/downloads-7954f5f757-vnvq4 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" start-of-body= Nov 24 01:15:19 crc kubenswrapper[4755]: I1124 01:15:19.563085 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-vnvq4" podUID="d55c6dc8-108f-4f8c-a6cf-4c7e1363c9e4" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" Nov 24 01:15:19 crc kubenswrapper[4755]: I1124 01:15:19.563327 4755 patch_prober.go:28] interesting pod/downloads-7954f5f757-vnvq4 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" start-of-body= Nov 24 01:15:19 crc kubenswrapper[4755]: I1124 01:15:19.563792 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-vnvq4" podUID="d55c6dc8-108f-4f8c-a6cf-4c7e1363c9e4" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" Nov 24 01:15:19 crc kubenswrapper[4755]: I1124 01:15:19.570157 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:15:19 crc kubenswrapper[4755]: I1124 01:15:19.796087 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cslz2" Nov 24 01:15:19 crc kubenswrapper[4755]: I1124 01:15:19.800375 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4879p"] Nov 24 01:15:19 crc kubenswrapper[4755]: I1124 01:15:19.810054 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cslz2" Nov 24 01:15:20 crc kubenswrapper[4755]: I1124 01:15:20.500229 4755 generic.go:334] "Generic (PLEG): container finished" podID="1c115205-515a-4f53-96ec-8559f1744b9b" containerID="67824b752acc0dd67906752026d1c047e9dc681a6437dae9eb8c6513a1d8e30e" exitCode=0 Nov 24 01:15:20 crc kubenswrapper[4755]: I1124 01:15:20.500304 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rm9z9" 
event={"ID":"1c115205-515a-4f53-96ec-8559f1744b9b","Type":"ContainerDied","Data":"67824b752acc0dd67906752026d1c047e9dc681a6437dae9eb8c6513a1d8e30e"} Nov 24 01:15:20 crc kubenswrapper[4755]: I1124 01:15:20.504256 4755 generic.go:334] "Generic (PLEG): container finished" podID="812e4b1b-8992-4f05-ba15-fdcc58c5bf7a" containerID="84f0cd6e6ef2062f7b2ca380241a346467497bb63427e1a47e330963093c12ec" exitCode=0 Nov 24 01:15:20 crc kubenswrapper[4755]: I1124 01:15:20.504311 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"812e4b1b-8992-4f05-ba15-fdcc58c5bf7a","Type":"ContainerDied","Data":"84f0cd6e6ef2062f7b2ca380241a346467497bb63427e1a47e330963093c12ec"} Nov 24 01:15:20 crc kubenswrapper[4755]: I1124 01:15:20.507009 4755 generic.go:334] "Generic (PLEG): container finished" podID="78f7ae95-68af-4e1d-8d97-8e675d6d3323" containerID="70b59942e165182a3d66f98900865d20138be2ffc5461bd50e42732ccd934f2d" exitCode=0 Nov 24 01:15:20 crc kubenswrapper[4755]: I1124 01:15:20.507107 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4879p" event={"ID":"78f7ae95-68af-4e1d-8d97-8e675d6d3323","Type":"ContainerDied","Data":"70b59942e165182a3d66f98900865d20138be2ffc5461bd50e42732ccd934f2d"} Nov 24 01:15:20 crc kubenswrapper[4755]: I1124 01:15:20.507139 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4879p" event={"ID":"78f7ae95-68af-4e1d-8d97-8e675d6d3323","Type":"ContainerStarted","Data":"18f3383d7c75f4ebb68756b36dba9a50932f5a52016df256455373c1e9e9acea"} Nov 24 01:15:20 crc kubenswrapper[4755]: I1124 01:15:20.514817 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-x78dk" Nov 24 01:15:20 crc kubenswrapper[4755]: I1124 01:15:20.535482 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-nfhp7" Nov 24 01:15:20 crc kubenswrapper[4755]: I1124 01:15:20.541035 4755 patch_prober.go:28] interesting pod/router-default-5444994796-nfhp7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 24 01:15:20 crc kubenswrapper[4755]: [-]has-synced failed: reason withheld Nov 24 01:15:20 crc kubenswrapper[4755]: [+]process-running ok Nov 24 01:15:20 crc kubenswrapper[4755]: healthz check failed Nov 24 01:15:20 crc kubenswrapper[4755]: I1124 01:15:20.541086 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nfhp7" podUID="b2977dd9-b5c2-40cd-bcd7-12f91d2edf31" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 24 01:15:21 crc kubenswrapper[4755]: I1124 01:15:21.536823 4755 patch_prober.go:28] interesting pod/router-default-5444994796-nfhp7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 24 01:15:21 crc kubenswrapper[4755]: [-]has-synced failed: reason withheld Nov 24 01:15:21 crc kubenswrapper[4755]: [+]process-running ok Nov 24 01:15:21 crc kubenswrapper[4755]: healthz check failed Nov 24 01:15:21 crc kubenswrapper[4755]: I1124 01:15:21.536879 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nfhp7" 
podUID="b2977dd9-b5c2-40cd-bcd7-12f91d2edf31" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 24 01:15:21 crc kubenswrapper[4755]: I1124 01:15:21.770546 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 24 01:15:21 crc kubenswrapper[4755]: I1124 01:15:21.783964 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/812e4b1b-8992-4f05-ba15-fdcc58c5bf7a-kubelet-dir\") pod \"812e4b1b-8992-4f05-ba15-fdcc58c5bf7a\" (UID: \"812e4b1b-8992-4f05-ba15-fdcc58c5bf7a\") " Nov 24 01:15:21 crc kubenswrapper[4755]: I1124 01:15:21.784038 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/812e4b1b-8992-4f05-ba15-fdcc58c5bf7a-kube-api-access\") pod \"812e4b1b-8992-4f05-ba15-fdcc58c5bf7a\" (UID: \"812e4b1b-8992-4f05-ba15-fdcc58c5bf7a\") " Nov 24 01:15:21 crc kubenswrapper[4755]: I1124 01:15:21.787822 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/812e4b1b-8992-4f05-ba15-fdcc58c5bf7a-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "812e4b1b-8992-4f05-ba15-fdcc58c5bf7a" (UID: "812e4b1b-8992-4f05-ba15-fdcc58c5bf7a"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 01:15:21 crc kubenswrapper[4755]: I1124 01:15:21.810070 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/812e4b1b-8992-4f05-ba15-fdcc58c5bf7a-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "812e4b1b-8992-4f05-ba15-fdcc58c5bf7a" (UID: "812e4b1b-8992-4f05-ba15-fdcc58c5bf7a"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:15:21 crc kubenswrapper[4755]: I1124 01:15:21.885535 4755 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/812e4b1b-8992-4f05-ba15-fdcc58c5bf7a-kubelet-dir\") on node \"crc\" DevicePath \"\"" Nov 24 01:15:21 crc kubenswrapper[4755]: I1124 01:15:21.885571 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/812e4b1b-8992-4f05-ba15-fdcc58c5bf7a-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 24 01:15:22 crc kubenswrapper[4755]: I1124 01:15:22.537575 4755 patch_prober.go:28] interesting pod/router-default-5444994796-nfhp7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 24 01:15:22 crc kubenswrapper[4755]: [-]has-synced failed: reason withheld Nov 24 01:15:22 crc kubenswrapper[4755]: [+]process-running ok Nov 24 01:15:22 crc kubenswrapper[4755]: healthz check failed Nov 24 01:15:22 crc kubenswrapper[4755]: I1124 01:15:22.537662 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nfhp7" podUID="b2977dd9-b5c2-40cd-bcd7-12f91d2edf31" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 24 01:15:22 crc kubenswrapper[4755]: I1124 01:15:22.542179 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"812e4b1b-8992-4f05-ba15-fdcc58c5bf7a","Type":"ContainerDied","Data":"ea9121662ae14b9db349202a6c579dfc43bebd40976af3e9b3cbc0f39282a74e"} Nov 24 01:15:22 crc kubenswrapper[4755]: I1124 01:15:22.542213 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ea9121662ae14b9db349202a6c579dfc43bebd40976af3e9b3cbc0f39282a74e" Nov 24 01:15:22 crc kubenswrapper[4755]: I1124 01:15:22.542261 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 24 01:15:22 crc kubenswrapper[4755]: I1124 01:15:22.890612 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Nov 24 01:15:22 crc kubenswrapper[4755]: E1124 01:15:22.890898 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="812e4b1b-8992-4f05-ba15-fdcc58c5bf7a" containerName="pruner" Nov 24 01:15:22 crc kubenswrapper[4755]: I1124 01:15:22.890911 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="812e4b1b-8992-4f05-ba15-fdcc58c5bf7a" containerName="pruner" Nov 24 01:15:22 crc kubenswrapper[4755]: I1124 01:15:22.891034 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="812e4b1b-8992-4f05-ba15-fdcc58c5bf7a" containerName="pruner" Nov 24 01:15:22 crc kubenswrapper[4755]: I1124 01:15:22.891408 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 24 01:15:22 crc kubenswrapper[4755]: I1124 01:15:22.893136 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Nov 24 01:15:22 crc kubenswrapper[4755]: I1124 01:15:22.895071 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Nov 24 01:15:22 crc kubenswrapper[4755]: I1124 01:15:22.895239 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Nov 24 01:15:22 crc kubenswrapper[4755]: I1124 01:15:22.903170 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d817b373-6b9c-49b1-9918-16279916b86a-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"d817b373-6b9c-49b1-9918-16279916b86a\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 24 01:15:22 crc kubenswrapper[4755]: I1124 01:15:22.903279 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d817b373-6b9c-49b1-9918-16279916b86a-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"d817b373-6b9c-49b1-9918-16279916b86a\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 24 01:15:23 crc kubenswrapper[4755]: I1124 01:15:23.007212 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d817b373-6b9c-49b1-9918-16279916b86a-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"d817b373-6b9c-49b1-9918-16279916b86a\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 24 01:15:23 crc kubenswrapper[4755]: I1124 01:15:23.007309 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d817b373-6b9c-49b1-9918-16279916b86a-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"d817b373-6b9c-49b1-9918-16279916b86a\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 24 01:15:23 crc kubenswrapper[4755]: I1124 01:15:23.007403 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d817b373-6b9c-49b1-9918-16279916b86a-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"d817b373-6b9c-49b1-9918-16279916b86a\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 24 01:15:23 crc kubenswrapper[4755]: I1124 01:15:23.070280 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d817b373-6b9c-49b1-9918-16279916b86a-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"d817b373-6b9c-49b1-9918-16279916b86a\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 24 01:15:23 crc kubenswrapper[4755]: I1124 01:15:23.246939 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 24 01:15:23 crc kubenswrapper[4755]: I1124 01:15:23.536291 4755 patch_prober.go:28] interesting pod/router-default-5444994796-nfhp7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 24 01:15:23 crc kubenswrapper[4755]: [-]has-synced failed: reason withheld Nov 24 01:15:23 crc kubenswrapper[4755]: [+]process-running ok Nov 24 01:15:23 crc kubenswrapper[4755]: healthz check failed Nov 24 01:15:23 crc kubenswrapper[4755]: I1124 01:15:23.536816 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nfhp7" podUID="b2977dd9-b5c2-40cd-bcd7-12f91d2edf31" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 24 01:15:23 crc kubenswrapper[4755]: I1124 01:15:23.824096 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:15:23 crc kubenswrapper[4755]: I1124 01:15:23.824152 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:15:23 crc kubenswrapper[4755]: I1124 01:15:23.829305 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:15:23 crc kubenswrapper[4755]: I1124 01:15:23.829817 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:15:23 crc kubenswrapper[4755]: I1124 01:15:23.916121 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:15:23 crc kubenswrapper[4755]: I1124 01:15:23.919083 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Nov 24 01:15:23 crc kubenswrapper[4755]: I1124 01:15:23.921751 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 01:15:23 crc kubenswrapper[4755]: I1124 01:15:23.925206 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:15:23 crc kubenswrapper[4755]: I1124 01:15:23.925261 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:15:23 crc kubenswrapper[4755]: I1124 01:15:23.926230 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:15:23 crc kubenswrapper[4755]: I1124 01:15:23.934699 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:15:24 crc kubenswrapper[4755]: I1124 01:15:24.133900 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 01:15:24 crc kubenswrapper[4755]: I1124 01:15:24.537291 4755 patch_prober.go:28] interesting pod/router-default-5444994796-nfhp7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 24 01:15:24 crc kubenswrapper[4755]: [-]has-synced failed: reason withheld Nov 24 01:15:24 crc kubenswrapper[4755]: [+]process-running ok Nov 24 01:15:24 crc kubenswrapper[4755]: healthz check failed Nov 24 01:15:24 crc kubenswrapper[4755]: I1124 01:15:24.537369 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nfhp7" podUID="b2977dd9-b5c2-40cd-bcd7-12f91d2edf31" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 24 01:15:25 crc kubenswrapper[4755]: I1124 01:15:25.541781 4755 patch_prober.go:28] interesting pod/router-default-5444994796-nfhp7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 24 01:15:25 crc kubenswrapper[4755]: [-]has-synced failed: reason withheld Nov 24 01:15:25 crc kubenswrapper[4755]: [+]process-running ok Nov 24 01:15:25 crc kubenswrapper[4755]: healthz check failed Nov 24 01:15:25 crc kubenswrapper[4755]: I1124 01:15:25.541885 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nfhp7" podUID="b2977dd9-b5c2-40cd-bcd7-12f91d2edf31" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 24 01:15:25 crc kubenswrapper[4755]: I1124 01:15:25.619088 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-g92v4" Nov 24 01:15:26 crc kubenswrapper[4755]: I1124 01:15:26.538647 4755 patch_prober.go:28] interesting pod/router-default-5444994796-nfhp7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 24 01:15:26 crc kubenswrapper[4755]: [-]has-synced failed: reason withheld Nov 24 01:15:26 crc kubenswrapper[4755]: [+]process-running ok Nov 24 01:15:26 crc kubenswrapper[4755]: healthz check failed Nov 24 01:15:26 crc kubenswrapper[4755]: I1124 01:15:26.538950 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nfhp7" podUID="b2977dd9-b5c2-40cd-bcd7-12f91d2edf31" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 24 01:15:27 crc kubenswrapper[4755]: I1124 01:15:27.537757 4755 patch_prober.go:28] interesting pod/router-default-5444994796-nfhp7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 24 01:15:27 crc kubenswrapper[4755]: [-]has-synced failed: reason withheld Nov 24 01:15:27 crc kubenswrapper[4755]: [+]process-running ok Nov 24 01:15:27 crc kubenswrapper[4755]: healthz check failed Nov 24 01:15:27 crc kubenswrapper[4755]: I1124 01:15:27.537818 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nfhp7" podUID="b2977dd9-b5c2-40cd-bcd7-12f91d2edf31" containerName="router" probeResult="failure" 
output="HTTP probe failed with statuscode: 500" Nov 24 01:15:28 crc kubenswrapper[4755]: I1124 01:15:28.538003 4755 patch_prober.go:28] interesting pod/router-default-5444994796-nfhp7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 24 01:15:28 crc kubenswrapper[4755]: [-]has-synced failed: reason withheld Nov 24 01:15:28 crc kubenswrapper[4755]: [+]process-running ok Nov 24 01:15:28 crc kubenswrapper[4755]: healthz check failed Nov 24 01:15:28 crc kubenswrapper[4755]: I1124 01:15:28.538354 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nfhp7" podUID="b2977dd9-b5c2-40cd-bcd7-12f91d2edf31" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 24 01:15:29 crc kubenswrapper[4755]: I1124 01:15:29.360777 4755 patch_prober.go:28] interesting pod/console-f9d7485db-mlls8 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.12:8443/health\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body= Nov 24 01:15:29 crc kubenswrapper[4755]: I1124 01:15:29.360862 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-mlls8" podUID="65c5f11b-931e-4dc2-8c3e-c7180b94ec08" containerName="console" probeResult="failure" output="Get \"https://10.217.0.12:8443/health\": dial tcp 10.217.0.12:8443: connect: connection refused" Nov 24 01:15:29 crc kubenswrapper[4755]: I1124 01:15:29.536462 4755 patch_prober.go:28] interesting pod/router-default-5444994796-nfhp7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 24 01:15:29 crc kubenswrapper[4755]: [-]has-synced failed: reason withheld Nov 24 01:15:29 crc kubenswrapper[4755]: [+]process-running ok Nov 24 01:15:29 crc kubenswrapper[4755]: healthz check failed Nov 24 01:15:29 crc kubenswrapper[4755]: I1124 01:15:29.536534 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nfhp7" podUID="b2977dd9-b5c2-40cd-bcd7-12f91d2edf31" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 24 01:15:29 crc kubenswrapper[4755]: I1124 01:15:29.568965 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-vnvq4" Nov 24 01:15:30 crc kubenswrapper[4755]: I1124 01:15:30.536471 4755 patch_prober.go:28] interesting pod/router-default-5444994796-nfhp7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 24 01:15:30 crc kubenswrapper[4755]: [+]has-synced ok Nov 24 01:15:30 crc kubenswrapper[4755]: [+]process-running ok Nov 24 01:15:30 crc kubenswrapper[4755]: healthz check failed Nov 24 01:15:30 crc kubenswrapper[4755]: I1124 01:15:30.536535 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nfhp7" podUID="b2977dd9-b5c2-40cd-bcd7-12f91d2edf31" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 24 01:15:31 crc kubenswrapper[4755]: I1124 01:15:31.538021 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-ingress/router-default-5444994796-nfhp7" Nov 24 01:15:31 crc kubenswrapper[4755]: I1124 01:15:31.540985 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-nfhp7" Nov 24 01:15:33 crc kubenswrapper[4755]: I1124 01:15:33.295773 4755 patch_prober.go:28] interesting pod/machine-config-daemon-h8xzm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 01:15:33 crc kubenswrapper[4755]: I1124 01:15:33.296237 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 01:15:33 crc kubenswrapper[4755]: I1124 01:15:33.618394 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"d817b373-6b9c-49b1-9918-16279916b86a","Type":"ContainerStarted","Data":"4d60a71c805b1d0ab0f45d53c69895caee78957b4a89f5a3efacdb3891413ce6"} Nov 24 01:15:37 crc kubenswrapper[4755]: I1124 01:15:37.537330 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:15:37 crc kubenswrapper[4755]: I1124 01:15:37.666104 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ccb86693-0b66-43ca-a2d1-e9594521d30f-metrics-certs\") pod \"network-metrics-daemon-9cl8m\" (UID: \"ccb86693-0b66-43ca-a2d1-e9594521d30f\") " pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:15:37 crc kubenswrapper[4755]: I1124 01:15:37.676374 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ccb86693-0b66-43ca-a2d1-e9594521d30f-metrics-certs\") pod \"network-metrics-daemon-9cl8m\" (UID: \"ccb86693-0b66-43ca-a2d1-e9594521d30f\") " pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:15:37 crc kubenswrapper[4755]: I1124 01:15:37.921972 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-9cl8m" Nov 24 01:15:39 crc kubenswrapper[4755]: I1124 01:15:39.366132 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-mlls8" Nov 24 01:15:39 crc kubenswrapper[4755]: I1124 01:15:39.371755 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-mlls8" Nov 24 01:15:48 crc kubenswrapper[4755]: E1124 01:15:48.155006 4755 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Nov 24 01:15:48 crc kubenswrapper[4755]: E1124 01:15:48.155444 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-djxdq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-57wzn_openshift-marketplace(9806f604-0abe-4de1-af45-78232744bc87): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 24 01:15:48 crc kubenswrapper[4755]: E1124 01:15:48.156671 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-57wzn" podUID="9806f604-0abe-4de1-af45-78232744bc87" Nov 24 01:15:49 crc kubenswrapper[4755]: E1124 01:15:49.149175 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-57wzn" podUID="9806f604-0abe-4de1-af45-78232744bc87" Nov 24 01:15:50 crc kubenswrapper[4755]: E1124 01:15:50.283443 4755 log.go:32] 
"PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Nov 24 01:15:50 crc kubenswrapper[4755]: E1124 01:15:50.283917 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pf299,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-4879p_openshift-marketplace(78f7ae95-68af-4e1d-8d97-8e675d6d3323): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 24 01:15:50 crc kubenswrapper[4755]: E1124 01:15:50.285152 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-4879p" podUID="78f7ae95-68af-4e1d-8d97-8e675d6d3323" Nov 24 01:15:50 crc kubenswrapper[4755]: I1124 01:15:50.497118 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-4hrgd" Nov 24 01:15:51 crc kubenswrapper[4755]: E1124 01:15:51.971417 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-4879p" podUID="78f7ae95-68af-4e1d-8d97-8e675d6d3323" Nov 24 01:15:52 crc kubenswrapper[4755]: I1124 01:15:52.316453 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-9cl8m"] Nov 24 01:15:52 crc kubenswrapper[4755]: E1124 01:15:52.341350 4755 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" 
image="registry.redhat.io/redhat/certified-operator-index:v4.18" Nov 24 01:15:52 crc kubenswrapper[4755]: E1124 01:15:52.341501 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-lh4g2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-ll47t_openshift-marketplace(601c4d50-2427-46ff-adc3-69c6960d43c2): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 24 01:15:52 crc kubenswrapper[4755]: E1124 01:15:52.342874 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-ll47t" podUID="601c4d50-2427-46ff-adc3-69c6960d43c2" Nov 24 01:15:52 crc kubenswrapper[4755]: E1124 01:15:52.509383 4755 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Nov 24 01:15:52 crc kubenswrapper[4755]: E1124 01:15:52.510250 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-n5vxk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-899vs_openshift-marketplace(9a1a2052-a1d2-4b6c-bb90-1c51c13499da): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 24 01:15:52 crc kubenswrapper[4755]: E1124 01:15:52.511548 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-899vs" podUID="9a1a2052-a1d2-4b6c-bb90-1c51c13499da" Nov 24 01:15:52 crc kubenswrapper[4755]: W1124 01:15:52.592866 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-887300e4a0c4ba55d591b9f007202eb0c043344414cc9e5a4e279defdde3e1c7 WatchSource:0}: Error finding container 887300e4a0c4ba55d591b9f007202eb0c043344414cc9e5a4e279defdde3e1c7: Status 404 returned error can't find the container with id 887300e4a0c4ba55d591b9f007202eb0c043344414cc9e5a4e279defdde3e1c7 Nov 24 01:15:52 crc kubenswrapper[4755]: W1124 01:15:52.594697 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b6479f0_333b_4a96_9adf_2099afdc2447.slice/crio-34dbc343673993ee59ad14f706b36410d8b72340329a6b637e6e1bf0e0b2559e WatchSource:0}: Error finding container 34dbc343673993ee59ad14f706b36410d8b72340329a6b637e6e1bf0e0b2559e: Status 404 returned error can't find the container with id 34dbc343673993ee59ad14f706b36410d8b72340329a6b637e6e1bf0e0b2559e Nov 24 01:15:52 crc kubenswrapper[4755]: I1124 01:15:52.709582 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"887300e4a0c4ba55d591b9f007202eb0c043344414cc9e5a4e279defdde3e1c7"} Nov 24 01:15:52 crc kubenswrapper[4755]: I1124 01:15:52.711812 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"39103adc9c5813b1e1f2e2412006663fd142ba7de3ccb78a4b9e4e6ee443887d"} Nov 24 01:15:52 crc kubenswrapper[4755]: I1124 01:15:52.711869 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"7b0f36d862c4a564323bc2e5ac7d8fcd3ddae9a2f6ee129b740a99e6124fab1b"} Nov 24 01:15:52 crc kubenswrapper[4755]: I1124 01:15:52.715425 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-9cl8m" event={"ID":"ccb86693-0b66-43ca-a2d1-e9594521d30f","Type":"ContainerStarted","Data":"b6b0acbf78a5f8ae35e2de68f97938e3d4e29af11fd2176d7415c691f8ce1cee"} Nov 24 01:15:52 crc kubenswrapper[4755]: I1124 01:15:52.715498 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-9cl8m" event={"ID":"ccb86693-0b66-43ca-a2d1-e9594521d30f","Type":"ContainerStarted","Data":"cd00d6a0e1f27396de73a69d7ef5a8f3684bedc928f901f54cb57b00249802ae"} Nov 24 01:15:52 crc kubenswrapper[4755]: I1124 01:15:52.720573 4755 generic.go:334] "Generic (PLEG): container finished" podID="b0f48fab-6749-416a-b712-c1dcb42b45d1" containerID="2b537dbfff8f1817d249902e95f3bb29ac277d255655b5a07c9f7b4e3d6392eb" exitCode=0 Nov 24 01:15:52 crc kubenswrapper[4755]: I1124 01:15:52.720872 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9nxdv" event={"ID":"b0f48fab-6749-416a-b712-c1dcb42b45d1","Type":"ContainerDied","Data":"2b537dbfff8f1817d249902e95f3bb29ac277d255655b5a07c9f7b4e3d6392eb"} Nov 24 01:15:52 crc kubenswrapper[4755]: E1124 01:15:52.722040 4755 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Nov 24 01:15:52 crc kubenswrapper[4755]: E1124 01:15:52.722231 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-t56kr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-fdzng_openshift-marketplace(21501e82-61af-435e-97f5-767cb357cbfb): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 24 01:15:52 crc kubenswrapper[4755]: E1124 01:15:52.723324 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-fdzng" podUID="21501e82-61af-435e-97f5-767cb357cbfb" Nov 24 01:15:52 crc kubenswrapper[4755]: I1124 01:15:52.723505 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"d817b373-6b9c-49b1-9918-16279916b86a","Type":"ContainerStarted","Data":"3b17c7d757cbe83d59118db495410b63bb79e76f0508d71ce08ac6f3a3576c93"} Nov 24 01:15:52 crc kubenswrapper[4755]: I1124 01:15:52.728369 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"34dbc343673993ee59ad14f706b36410d8b72340329a6b637e6e1bf0e0b2559e"} Nov 24 01:15:52 crc kubenswrapper[4755]: E1124 01:15:52.732037 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-899vs" podUID="9a1a2052-a1d2-4b6c-bb90-1c51c13499da" Nov 24 01:15:52 crc kubenswrapper[4755]: E1124 01:15:52.732157 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-ll47t" podUID="601c4d50-2427-46ff-adc3-69c6960d43c2" Nov 24 01:15:52 crc kubenswrapper[4755]: I1124 01:15:52.746804 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=30.746783454 podStartE2EDuration="30.746783454s" podCreationTimestamp="2025-11-24 01:15:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:15:52.74272689 +0000 UTC m=+177.428792391" watchObservedRunningTime="2025-11-24 01:15:52.746783454 +0000 UTC m=+177.432848945" Nov 24 01:15:52 crc kubenswrapper[4755]: E1124 01:15:52.975483 4755 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Nov 24 01:15:52 crc kubenswrapper[4755]: E1124 01:15:52.975702 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qfrt6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-khbxb_openshift-marketplace(b1028f34-b287-4775-a138-1ccdae47b7ee): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 24 01:15:52 crc kubenswrapper[4755]: E1124 01:15:52.980259 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-khbxb" podUID="b1028f34-b287-4775-a138-1ccdae47b7ee" Nov 24 01:15:53 crc kubenswrapper[4755]: E1124 01:15:53.417509 4755 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Nov 24 01:15:53 crc kubenswrapper[4755]: E1124 01:15:53.417696 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7phb4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-rm9z9_openshift-marketplace(1c115205-515a-4f53-96ec-8559f1744b9b): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 24 01:15:53 crc kubenswrapper[4755]: E1124 01:15:53.419268 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-rm9z9" podUID="1c115205-515a-4f53-96ec-8559f1744b9b" Nov 24 01:15:53 crc kubenswrapper[4755]: I1124 01:15:53.734702 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-9cl8m" event={"ID":"ccb86693-0b66-43ca-a2d1-e9594521d30f","Type":"ContainerStarted","Data":"02a5f859cc97855abe2fecb4e8bcd4928a7a9ea45062519df370e30be4ff2694"} Nov 24 01:15:53 crc kubenswrapper[4755]: I1124 01:15:53.737666 4755 generic.go:334] "Generic (PLEG): container finished" podID="d817b373-6b9c-49b1-9918-16279916b86a" containerID="3b17c7d757cbe83d59118db495410b63bb79e76f0508d71ce08ac6f3a3576c93" exitCode=0 Nov 24 01:15:53 crc kubenswrapper[4755]: I1124 01:15:53.737738 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"d817b373-6b9c-49b1-9918-16279916b86a","Type":"ContainerDied","Data":"3b17c7d757cbe83d59118db495410b63bb79e76f0508d71ce08ac6f3a3576c93"} Nov 24 01:15:53 crc kubenswrapper[4755]: I1124 01:15:53.740664 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"64322ced0a2eace48ff8697f95e92626ebec0955347c302bad71d37a5779d3e6"} Nov 24 01:15:53 crc kubenswrapper[4755]: I1124 01:15:53.741032 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:15:53 crc kubenswrapper[4755]: I1124 01:15:53.743898 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"ebe6818b0c11206a03ab321697459185ef30b49311edddd2e0c8eaaba50b479c"} Nov 24 01:15:53 crc kubenswrapper[4755]: E1124 01:15:53.747404 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-rm9z9" podUID="1c115205-515a-4f53-96ec-8559f1744b9b" Nov 24 01:15:53 crc kubenswrapper[4755]: E1124 01:15:53.747505 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-khbxb" podUID="b1028f34-b287-4775-a138-1ccdae47b7ee" Nov 24 01:15:53 crc kubenswrapper[4755]: E1124 01:15:53.749325 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-fdzng" podUID="21501e82-61af-435e-97f5-767cb357cbfb" Nov 24 01:15:53 crc kubenswrapper[4755]: I1124 01:15:53.765153 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-9cl8m" podStartSLOduration=158.765128944 podStartE2EDuration="2m38.765128944s" podCreationTimestamp="2025-11-24 01:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:15:53.764428002 +0000 UTC m=+178.450493513" watchObservedRunningTime="2025-11-24 01:15:53.765128944 +0000 UTC m=+178.451194455" Nov 24 01:15:54 crc kubenswrapper[4755]: I1124 01:15:54.763311 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9nxdv" event={"ID":"b0f48fab-6749-416a-b712-c1dcb42b45d1","Type":"ContainerStarted","Data":"95173780727bac3a5d89989c37c1515d52b53ce501e4ac1a58b163f20270c678"} Nov 24 01:15:54 crc kubenswrapper[4755]: I1124 01:15:54.786324 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-9nxdv" podStartSLOduration=2.064482917 podStartE2EDuration="36.78630529s" podCreationTimestamp="2025-11-24 01:15:18 +0000 UTC" firstStartedPulling="2025-11-24 01:15:19.393256498 +0000 UTC m=+144.079321999" lastFinishedPulling="2025-11-24 01:15:54.115078871 +0000 UTC m=+178.801144372" observedRunningTime="2025-11-24 01:15:54.784132133 +0000 UTC m=+179.470197664" watchObservedRunningTime="2025-11-24 01:15:54.78630529 +0000 UTC m=+179.472370791" Nov 24 01:15:55 crc kubenswrapper[4755]: I1124 01:15:55.054891 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 24 01:15:55 crc kubenswrapper[4755]: I1124 01:15:55.106950 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d817b373-6b9c-49b1-9918-16279916b86a-kubelet-dir\") pod \"d817b373-6b9c-49b1-9918-16279916b86a\" (UID: \"d817b373-6b9c-49b1-9918-16279916b86a\") " Nov 24 01:15:55 crc kubenswrapper[4755]: I1124 01:15:55.107116 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d817b373-6b9c-49b1-9918-16279916b86a-kube-api-access\") pod \"d817b373-6b9c-49b1-9918-16279916b86a\" (UID: \"d817b373-6b9c-49b1-9918-16279916b86a\") " Nov 24 01:15:55 crc kubenswrapper[4755]: I1124 01:15:55.107127 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d817b373-6b9c-49b1-9918-16279916b86a-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "d817b373-6b9c-49b1-9918-16279916b86a" (UID: "d817b373-6b9c-49b1-9918-16279916b86a"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 01:15:55 crc kubenswrapper[4755]: I1124 01:15:55.107321 4755 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d817b373-6b9c-49b1-9918-16279916b86a-kubelet-dir\") on node \"crc\" DevicePath \"\"" Nov 24 01:15:55 crc kubenswrapper[4755]: I1124 01:15:55.112728 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d817b373-6b9c-49b1-9918-16279916b86a-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "d817b373-6b9c-49b1-9918-16279916b86a" (UID: "d817b373-6b9c-49b1-9918-16279916b86a"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:15:55 crc kubenswrapper[4755]: I1124 01:15:55.208847 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d817b373-6b9c-49b1-9918-16279916b86a-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 24 01:15:55 crc kubenswrapper[4755]: I1124 01:15:55.781961 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"d817b373-6b9c-49b1-9918-16279916b86a","Type":"ContainerDied","Data":"4d60a71c805b1d0ab0f45d53c69895caee78957b4a89f5a3efacdb3891413ce6"} Nov 24 01:15:55 crc kubenswrapper[4755]: I1124 01:15:55.782303 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4d60a71c805b1d0ab0f45d53c69895caee78957b4a89f5a3efacdb3891413ce6" Nov 24 01:15:55 crc kubenswrapper[4755]: I1124 01:15:55.781998 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 24 01:15:58 crc kubenswrapper[4755]: I1124 01:15:58.417291 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-9nxdv" Nov 24 01:15:58 crc kubenswrapper[4755]: I1124 01:15:58.419208 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-9nxdv" Nov 24 01:15:58 crc kubenswrapper[4755]: I1124 01:15:58.759141 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-9nxdv" Nov 24 01:16:03 crc kubenswrapper[4755]: I1124 01:16:03.295257 4755 patch_prober.go:28] interesting pod/machine-config-daemon-h8xzm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 01:16:03 crc kubenswrapper[4755]: I1124 01:16:03.296644 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 01:16:03 crc kubenswrapper[4755]: I1124 01:16:03.824196 4755 generic.go:334] "Generic (PLEG): container finished" podID="9806f604-0abe-4de1-af45-78232744bc87" containerID="0d12845b64ea6b73edd3701a08ab5e49ba048ad3523f1ef8b990777fe23911d9" exitCode=0 Nov 24 01:16:03 crc kubenswrapper[4755]: I1124 01:16:03.824240 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-57wzn" event={"ID":"9806f604-0abe-4de1-af45-78232744bc87","Type":"ContainerDied","Data":"0d12845b64ea6b73edd3701a08ab5e49ba048ad3523f1ef8b990777fe23911d9"} Nov 24 01:16:04 crc kubenswrapper[4755]: I1124 01:16:04.830538 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-57wzn" event={"ID":"9806f604-0abe-4de1-af45-78232744bc87","Type":"ContainerStarted","Data":"997e893021a80ffdc4e0c5a4b5f60dbc24cd129f170bb95771f37fa3b4e35049"} Nov 24 01:16:04 crc kubenswrapper[4755]: I1124 01:16:04.832838 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-899vs" event={"ID":"9a1a2052-a1d2-4b6c-bb90-1c51c13499da","Type":"ContainerStarted","Data":"ca2498a83ef6446caada533d5573d091930427281f9364633e7dea96ad195f8d"} Nov 24 01:16:04 crc kubenswrapper[4755]: I1124 01:16:04.850207 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-57wzn" podStartSLOduration=2.814882877 podStartE2EDuration="49.850188971s" podCreationTimestamp="2025-11-24 01:15:15 +0000 UTC" firstStartedPulling="2025-11-24 01:15:17.250750358 +0000 UTC m=+141.936815859" lastFinishedPulling="2025-11-24 01:16:04.286056452 +0000 UTC m=+188.972121953" observedRunningTime="2025-11-24 01:16:04.849909702 +0000 UTC m=+189.535975203" watchObservedRunningTime="2025-11-24 01:16:04.850188971 +0000 UTC m=+189.536254472" Nov 24 01:16:05 crc kubenswrapper[4755]: I1124 01:16:05.811073 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-57wzn" Nov 24 01:16:05 crc kubenswrapper[4755]: I1124 01:16:05.811133 4755 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openshift-marketplace/community-operators-57wzn" Nov 24 01:16:05 crc kubenswrapper[4755]: I1124 01:16:05.840256 4755 generic.go:334] "Generic (PLEG): container finished" podID="9a1a2052-a1d2-4b6c-bb90-1c51c13499da" containerID="ca2498a83ef6446caada533d5573d091930427281f9364633e7dea96ad195f8d" exitCode=0 Nov 24 01:16:05 crc kubenswrapper[4755]: I1124 01:16:05.840981 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-899vs" event={"ID":"9a1a2052-a1d2-4b6c-bb90-1c51c13499da","Type":"ContainerDied","Data":"ca2498a83ef6446caada533d5573d091930427281f9364633e7dea96ad195f8d"} Nov 24 01:16:06 crc kubenswrapper[4755]: I1124 01:16:06.848865 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ll47t" event={"ID":"601c4d50-2427-46ff-adc3-69c6960d43c2","Type":"ContainerStarted","Data":"52945befc58a2caf2afa0272e9cd3d561780193a711a3905d8383c6941d7e8dc"} Nov 24 01:16:06 crc kubenswrapper[4755]: I1124 01:16:06.856690 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-57wzn" podUID="9806f604-0abe-4de1-af45-78232744bc87" containerName="registry-server" probeResult="failure" output=< Nov 24 01:16:06 crc kubenswrapper[4755]: timeout: failed to connect service ":50051" within 1s Nov 24 01:16:06 crc kubenswrapper[4755]: > Nov 24 01:16:07 crc kubenswrapper[4755]: I1124 01:16:07.857373 4755 generic.go:334] "Generic (PLEG): container finished" podID="601c4d50-2427-46ff-adc3-69c6960d43c2" containerID="52945befc58a2caf2afa0272e9cd3d561780193a711a3905d8383c6941d7e8dc" exitCode=0 Nov 24 01:16:07 crc kubenswrapper[4755]: I1124 01:16:07.857434 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ll47t" event={"ID":"601c4d50-2427-46ff-adc3-69c6960d43c2","Type":"ContainerDied","Data":"52945befc58a2caf2afa0272e9cd3d561780193a711a3905d8383c6941d7e8dc"} Nov 24 01:16:07 crc kubenswrapper[4755]: I1124 01:16:07.862910 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-899vs" event={"ID":"9a1a2052-a1d2-4b6c-bb90-1c51c13499da","Type":"ContainerStarted","Data":"1b19adeb3e7b86f87a84122ef6b4be481a21f33326ec3b4a053b4e84cbe2e67e"} Nov 24 01:16:07 crc kubenswrapper[4755]: I1124 01:16:07.895079 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-899vs" podStartSLOduration=2.965176136 podStartE2EDuration="52.895060848s" podCreationTimestamp="2025-11-24 01:15:15 +0000 UTC" firstStartedPulling="2025-11-24 01:15:17.231675425 +0000 UTC m=+141.917740926" lastFinishedPulling="2025-11-24 01:16:07.161560137 +0000 UTC m=+191.847625638" observedRunningTime="2025-11-24 01:16:07.89414577 +0000 UTC m=+192.580211291" watchObservedRunningTime="2025-11-24 01:16:07.895060848 +0000 UTC m=+192.581126349" Nov 24 01:16:08 crc kubenswrapper[4755]: I1124 01:16:08.456307 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-9nxdv" Nov 24 01:16:08 crc kubenswrapper[4755]: I1124 01:16:08.621204 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-pnvtd"] Nov 24 01:16:08 crc kubenswrapper[4755]: I1124 01:16:08.902125 4755 generic.go:334] "Generic (PLEG): container finished" podID="78f7ae95-68af-4e1d-8d97-8e675d6d3323" containerID="c2b1cf6905fff85240cb59bbb9aed4c86475ddb15fa9299fd59a989a2b86323a" 
exitCode=0 Nov 24 01:16:08 crc kubenswrapper[4755]: I1124 01:16:08.902327 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4879p" event={"ID":"78f7ae95-68af-4e1d-8d97-8e675d6d3323","Type":"ContainerDied","Data":"c2b1cf6905fff85240cb59bbb9aed4c86475ddb15fa9299fd59a989a2b86323a"} Nov 24 01:16:09 crc kubenswrapper[4755]: I1124 01:16:09.910266 4755 generic.go:334] "Generic (PLEG): container finished" podID="b1028f34-b287-4775-a138-1ccdae47b7ee" containerID="cf695e0bfad7e7d72b4b7cc0a820d85b5f387e04ded0d0a9ad2405f7c6d1c5b0" exitCode=0 Nov 24 01:16:09 crc kubenswrapper[4755]: I1124 01:16:09.910308 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-khbxb" event={"ID":"b1028f34-b287-4775-a138-1ccdae47b7ee","Type":"ContainerDied","Data":"cf695e0bfad7e7d72b4b7cc0a820d85b5f387e04ded0d0a9ad2405f7c6d1c5b0"} Nov 24 01:16:10 crc kubenswrapper[4755]: I1124 01:16:10.916776 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ll47t" event={"ID":"601c4d50-2427-46ff-adc3-69c6960d43c2","Type":"ContainerStarted","Data":"890eaac30a0fdd773814c7b19a5c7dec1eb0c6be468d72c41fc45382087559a6"} Nov 24 01:16:10 crc kubenswrapper[4755]: I1124 01:16:10.952567 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-ll47t" podStartSLOduration=2.781860696 podStartE2EDuration="54.9525474s" podCreationTimestamp="2025-11-24 01:15:16 +0000 UTC" firstStartedPulling="2025-11-24 01:15:18.312968417 +0000 UTC m=+142.999033918" lastFinishedPulling="2025-11-24 01:16:10.483655121 +0000 UTC m=+195.169720622" observedRunningTime="2025-11-24 01:16:10.94928359 +0000 UTC m=+195.635349111" watchObservedRunningTime="2025-11-24 01:16:10.9525474 +0000 UTC m=+195.638612901" Nov 24 01:16:11 crc kubenswrapper[4755]: I1124 01:16:11.257446 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9nxdv"] Nov 24 01:16:11 crc kubenswrapper[4755]: I1124 01:16:11.257738 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-9nxdv" podUID="b0f48fab-6749-416a-b712-c1dcb42b45d1" containerName="registry-server" containerID="cri-o://95173780727bac3a5d89989c37c1515d52b53ce501e4ac1a58b163f20270c678" gracePeriod=2 Nov 24 01:16:11 crc kubenswrapper[4755]: I1124 01:16:11.775898 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9nxdv" Nov 24 01:16:11 crc kubenswrapper[4755]: I1124 01:16:11.923993 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4879p" event={"ID":"78f7ae95-68af-4e1d-8d97-8e675d6d3323","Type":"ContainerStarted","Data":"bf185f5036a9c01743b7cb4995ad0251a35c27b2ca74b1ff4261283821a95d82"} Nov 24 01:16:11 crc kubenswrapper[4755]: I1124 01:16:11.929138 4755 generic.go:334] "Generic (PLEG): container finished" podID="b0f48fab-6749-416a-b712-c1dcb42b45d1" containerID="95173780727bac3a5d89989c37c1515d52b53ce501e4ac1a58b163f20270c678" exitCode=0 Nov 24 01:16:11 crc kubenswrapper[4755]: I1124 01:16:11.929204 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9nxdv" event={"ID":"b0f48fab-6749-416a-b712-c1dcb42b45d1","Type":"ContainerDied","Data":"95173780727bac3a5d89989c37c1515d52b53ce501e4ac1a58b163f20270c678"} Nov 24 01:16:11 crc kubenswrapper[4755]: I1124 01:16:11.929231 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9nxdv" event={"ID":"b0f48fab-6749-416a-b712-c1dcb42b45d1","Type":"ContainerDied","Data":"68609947ed03e2430a16e4dbb0b77fc50d82dc8e3e0768a7f4b5af07cb854c0c"} Nov 24 01:16:11 crc kubenswrapper[4755]: I1124 01:16:11.929247 4755 scope.go:117] "RemoveContainer" containerID="95173780727bac3a5d89989c37c1515d52b53ce501e4ac1a58b163f20270c678" Nov 24 01:16:11 crc kubenswrapper[4755]: I1124 01:16:11.929362 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9nxdv" Nov 24 01:16:11 crc kubenswrapper[4755]: I1124 01:16:11.933028 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rm9z9" event={"ID":"1c115205-515a-4f53-96ec-8559f1744b9b","Type":"ContainerStarted","Data":"bd5d495c9057cc5861429bf3544ec1476f4d3e0997fc2567bb790352b1bba648"} Nov 24 01:16:11 crc kubenswrapper[4755]: I1124 01:16:11.935328 4755 generic.go:334] "Generic (PLEG): container finished" podID="21501e82-61af-435e-97f5-767cb357cbfb" containerID="f823c00279633f3f72d896f8fb87d5291de357b9a3b970f20be98e2a6ad3cf1d" exitCode=0 Nov 24 01:16:11 crc kubenswrapper[4755]: I1124 01:16:11.935388 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fdzng" event={"ID":"21501e82-61af-435e-97f5-767cb357cbfb","Type":"ContainerDied","Data":"f823c00279633f3f72d896f8fb87d5291de357b9a3b970f20be98e2a6ad3cf1d"} Nov 24 01:16:11 crc kubenswrapper[4755]: I1124 01:16:11.944260 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-4879p" podStartSLOduration=2.617311785 podStartE2EDuration="52.944242373s" podCreationTimestamp="2025-11-24 01:15:19 +0000 UTC" firstStartedPulling="2025-11-24 01:15:20.509471845 +0000 UTC m=+145.195537346" lastFinishedPulling="2025-11-24 01:16:10.836402433 +0000 UTC m=+195.522467934" observedRunningTime="2025-11-24 01:16:11.940290515 +0000 UTC m=+196.626356036" watchObservedRunningTime="2025-11-24 01:16:11.944242373 +0000 UTC m=+196.630307874" Nov 24 01:16:11 crc kubenswrapper[4755]: I1124 01:16:11.946188 4755 scope.go:117] "RemoveContainer" containerID="2b537dbfff8f1817d249902e95f3bb29ac277d255655b5a07c9f7b4e3d6392eb" Nov 24 01:16:11 crc kubenswrapper[4755]: I1124 01:16:11.957529 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/b0f48fab-6749-416a-b712-c1dcb42b45d1-catalog-content\") pod \"b0f48fab-6749-416a-b712-c1dcb42b45d1\" (UID: \"b0f48fab-6749-416a-b712-c1dcb42b45d1\") " Nov 24 01:16:11 crc kubenswrapper[4755]: I1124 01:16:11.957619 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0f48fab-6749-416a-b712-c1dcb42b45d1-utilities\") pod \"b0f48fab-6749-416a-b712-c1dcb42b45d1\" (UID: \"b0f48fab-6749-416a-b712-c1dcb42b45d1\") " Nov 24 01:16:11 crc kubenswrapper[4755]: I1124 01:16:11.957689 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2bfk7\" (UniqueName: \"kubernetes.io/projected/b0f48fab-6749-416a-b712-c1dcb42b45d1-kube-api-access-2bfk7\") pod \"b0f48fab-6749-416a-b712-c1dcb42b45d1\" (UID: \"b0f48fab-6749-416a-b712-c1dcb42b45d1\") " Nov 24 01:16:11 crc kubenswrapper[4755]: I1124 01:16:11.962695 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b0f48fab-6749-416a-b712-c1dcb42b45d1-utilities" (OuterVolumeSpecName: "utilities") pod "b0f48fab-6749-416a-b712-c1dcb42b45d1" (UID: "b0f48fab-6749-416a-b712-c1dcb42b45d1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:16:11 crc kubenswrapper[4755]: I1124 01:16:11.966895 4755 scope.go:117] "RemoveContainer" containerID="172d89992728256a9ad8d8dd2d270f315d9e45aa30a1a2ef518754bb23e6a793" Nov 24 01:16:11 crc kubenswrapper[4755]: I1124 01:16:11.970783 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b0f48fab-6749-416a-b712-c1dcb42b45d1-kube-api-access-2bfk7" (OuterVolumeSpecName: "kube-api-access-2bfk7") pod "b0f48fab-6749-416a-b712-c1dcb42b45d1" (UID: "b0f48fab-6749-416a-b712-c1dcb42b45d1"). InnerVolumeSpecName "kube-api-access-2bfk7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:16:11 crc kubenswrapper[4755]: I1124 01:16:11.978399 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b0f48fab-6749-416a-b712-c1dcb42b45d1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b0f48fab-6749-416a-b712-c1dcb42b45d1" (UID: "b0f48fab-6749-416a-b712-c1dcb42b45d1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:16:12 crc kubenswrapper[4755]: I1124 01:16:12.001119 4755 scope.go:117] "RemoveContainer" containerID="95173780727bac3a5d89989c37c1515d52b53ce501e4ac1a58b163f20270c678" Nov 24 01:16:12 crc kubenswrapper[4755]: E1124 01:16:12.001554 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"95173780727bac3a5d89989c37c1515d52b53ce501e4ac1a58b163f20270c678\": container with ID starting with 95173780727bac3a5d89989c37c1515d52b53ce501e4ac1a58b163f20270c678 not found: ID does not exist" containerID="95173780727bac3a5d89989c37c1515d52b53ce501e4ac1a58b163f20270c678" Nov 24 01:16:12 crc kubenswrapper[4755]: I1124 01:16:12.001701 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"95173780727bac3a5d89989c37c1515d52b53ce501e4ac1a58b163f20270c678"} err="failed to get container status \"95173780727bac3a5d89989c37c1515d52b53ce501e4ac1a58b163f20270c678\": rpc error: code = NotFound desc = could not find container \"95173780727bac3a5d89989c37c1515d52b53ce501e4ac1a58b163f20270c678\": container with ID starting with 95173780727bac3a5d89989c37c1515d52b53ce501e4ac1a58b163f20270c678 not found: ID does not exist" Nov 24 01:16:12 crc kubenswrapper[4755]: I1124 01:16:12.001837 4755 scope.go:117] "RemoveContainer" containerID="2b537dbfff8f1817d249902e95f3bb29ac277d255655b5a07c9f7b4e3d6392eb" Nov 24 01:16:12 crc kubenswrapper[4755]: E1124 01:16:12.002407 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2b537dbfff8f1817d249902e95f3bb29ac277d255655b5a07c9f7b4e3d6392eb\": container with ID starting with 2b537dbfff8f1817d249902e95f3bb29ac277d255655b5a07c9f7b4e3d6392eb not found: ID does not exist" containerID="2b537dbfff8f1817d249902e95f3bb29ac277d255655b5a07c9f7b4e3d6392eb" Nov 24 01:16:12 crc kubenswrapper[4755]: I1124 01:16:12.002579 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b537dbfff8f1817d249902e95f3bb29ac277d255655b5a07c9f7b4e3d6392eb"} err="failed to get container status \"2b537dbfff8f1817d249902e95f3bb29ac277d255655b5a07c9f7b4e3d6392eb\": rpc error: code = NotFound desc = could not find container \"2b537dbfff8f1817d249902e95f3bb29ac277d255655b5a07c9f7b4e3d6392eb\": container with ID starting with 2b537dbfff8f1817d249902e95f3bb29ac277d255655b5a07c9f7b4e3d6392eb not found: ID does not exist" Nov 24 01:16:12 crc kubenswrapper[4755]: I1124 01:16:12.002760 4755 scope.go:117] "RemoveContainer" containerID="172d89992728256a9ad8d8dd2d270f315d9e45aa30a1a2ef518754bb23e6a793" Nov 24 01:16:12 crc kubenswrapper[4755]: E1124 01:16:12.003368 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"172d89992728256a9ad8d8dd2d270f315d9e45aa30a1a2ef518754bb23e6a793\": container with ID starting with 172d89992728256a9ad8d8dd2d270f315d9e45aa30a1a2ef518754bb23e6a793 not found: ID does not exist" containerID="172d89992728256a9ad8d8dd2d270f315d9e45aa30a1a2ef518754bb23e6a793" Nov 24 01:16:12 crc kubenswrapper[4755]: I1124 01:16:12.003391 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"172d89992728256a9ad8d8dd2d270f315d9e45aa30a1a2ef518754bb23e6a793"} err="failed to get container status \"172d89992728256a9ad8d8dd2d270f315d9e45aa30a1a2ef518754bb23e6a793\": rpc error: code = NotFound desc = could not 
find container \"172d89992728256a9ad8d8dd2d270f315d9e45aa30a1a2ef518754bb23e6a793\": container with ID starting with 172d89992728256a9ad8d8dd2d270f315d9e45aa30a1a2ef518754bb23e6a793 not found: ID does not exist" Nov 24 01:16:12 crc kubenswrapper[4755]: I1124 01:16:12.058730 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0f48fab-6749-416a-b712-c1dcb42b45d1-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 01:16:12 crc kubenswrapper[4755]: I1124 01:16:12.058771 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0f48fab-6749-416a-b712-c1dcb42b45d1-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 01:16:12 crc kubenswrapper[4755]: I1124 01:16:12.058783 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2bfk7\" (UniqueName: \"kubernetes.io/projected/b0f48fab-6749-416a-b712-c1dcb42b45d1-kube-api-access-2bfk7\") on node \"crc\" DevicePath \"\"" Nov 24 01:16:12 crc kubenswrapper[4755]: I1124 01:16:12.247153 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9nxdv"] Nov 24 01:16:12 crc kubenswrapper[4755]: I1124 01:16:12.250983 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-9nxdv"] Nov 24 01:16:12 crc kubenswrapper[4755]: I1124 01:16:12.949581 4755 generic.go:334] "Generic (PLEG): container finished" podID="1c115205-515a-4f53-96ec-8559f1744b9b" containerID="bd5d495c9057cc5861429bf3544ec1476f4d3e0997fc2567bb790352b1bba648" exitCode=0 Nov 24 01:16:12 crc kubenswrapper[4755]: I1124 01:16:12.949779 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rm9z9" event={"ID":"1c115205-515a-4f53-96ec-8559f1744b9b","Type":"ContainerDied","Data":"bd5d495c9057cc5861429bf3544ec1476f4d3e0997fc2567bb790352b1bba648"} Nov 24 01:16:12 crc kubenswrapper[4755]: I1124 01:16:12.954631 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fdzng" event={"ID":"21501e82-61af-435e-97f5-767cb357cbfb","Type":"ContainerStarted","Data":"8784e50b35039b04935df96460c3fab7585bb9a36562098274d4f3545850cf59"} Nov 24 01:16:12 crc kubenswrapper[4755]: I1124 01:16:12.957279 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-khbxb" event={"ID":"b1028f34-b287-4775-a138-1ccdae47b7ee","Type":"ContainerStarted","Data":"478d6bcbe108b25feca4d8292c4388fe5d8a242bec251d456afe064e03ac406c"} Nov 24 01:16:12 crc kubenswrapper[4755]: I1124 01:16:12.996950 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-fdzng" podStartSLOduration=2.931822103 podStartE2EDuration="55.99692861s" podCreationTimestamp="2025-11-24 01:15:17 +0000 UTC" firstStartedPulling="2025-11-24 01:15:19.474825339 +0000 UTC m=+144.160890840" lastFinishedPulling="2025-11-24 01:16:12.539931846 +0000 UTC m=+197.225997347" observedRunningTime="2025-11-24 01:16:12.993738716 +0000 UTC m=+197.679804237" watchObservedRunningTime="2025-11-24 01:16:12.99692861 +0000 UTC m=+197.682994111" Nov 24 01:16:13 crc kubenswrapper[4755]: I1124 01:16:13.016062 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-khbxb" podStartSLOduration=3.680871556 podStartE2EDuration="58.016039441s" podCreationTimestamp="2025-11-24 01:15:15 +0000 UTC" 
firstStartedPulling="2025-11-24 01:15:17.209705204 +0000 UTC m=+141.895770705" lastFinishedPulling="2025-11-24 01:16:11.544873089 +0000 UTC m=+196.230938590" observedRunningTime="2025-11-24 01:16:13.015947198 +0000 UTC m=+197.702012709" watchObservedRunningTime="2025-11-24 01:16:13.016039441 +0000 UTC m=+197.702104952" Nov 24 01:16:13 crc kubenswrapper[4755]: I1124 01:16:13.964612 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rm9z9" event={"ID":"1c115205-515a-4f53-96ec-8559f1744b9b","Type":"ContainerStarted","Data":"5bb4131c4cddaa70806b3fddd1600fbe0d2a21c6dad29ab90bfade1b6ed4c423"} Nov 24 01:16:13 crc kubenswrapper[4755]: I1124 01:16:13.986516 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rm9z9" podStartSLOduration=2.967162763 podStartE2EDuration="55.986497003s" podCreationTimestamp="2025-11-24 01:15:18 +0000 UTC" firstStartedPulling="2025-11-24 01:15:20.502976697 +0000 UTC m=+145.189042198" lastFinishedPulling="2025-11-24 01:16:13.522310937 +0000 UTC m=+198.208376438" observedRunningTime="2025-11-24 01:16:13.982336668 +0000 UTC m=+198.668402169" watchObservedRunningTime="2025-11-24 01:16:13.986497003 +0000 UTC m=+198.672562504" Nov 24 01:16:14 crc kubenswrapper[4755]: I1124 01:16:14.003031 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b0f48fab-6749-416a-b712-c1dcb42b45d1" path="/var/lib/kubelet/pods/b0f48fab-6749-416a-b712-c1dcb42b45d1/volumes" Nov 24 01:16:15 crc kubenswrapper[4755]: I1124 01:16:15.854586 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-57wzn" Nov 24 01:16:15 crc kubenswrapper[4755]: I1124 01:16:15.918294 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-57wzn" Nov 24 01:16:16 crc kubenswrapper[4755]: I1124 01:16:16.324524 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-khbxb" Nov 24 01:16:16 crc kubenswrapper[4755]: I1124 01:16:16.324871 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-khbxb" Nov 24 01:16:16 crc kubenswrapper[4755]: I1124 01:16:16.396952 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-khbxb" Nov 24 01:16:16 crc kubenswrapper[4755]: I1124 01:16:16.486878 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-ll47t" Nov 24 01:16:16 crc kubenswrapper[4755]: I1124 01:16:16.486931 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-ll47t" Nov 24 01:16:16 crc kubenswrapper[4755]: I1124 01:16:16.498396 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-899vs" Nov 24 01:16:16 crc kubenswrapper[4755]: I1124 01:16:16.498440 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-899vs" Nov 24 01:16:16 crc kubenswrapper[4755]: I1124 01:16:16.539846 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-ll47t" Nov 24 01:16:16 crc kubenswrapper[4755]: I1124 01:16:16.570381 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="started" pod="openshift-marketplace/community-operators-899vs" Nov 24 01:16:17 crc kubenswrapper[4755]: I1124 01:16:17.017004 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-ll47t" Nov 24 01:16:17 crc kubenswrapper[4755]: I1124 01:16:17.022160 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-899vs" Nov 24 01:16:17 crc kubenswrapper[4755]: I1124 01:16:17.654225 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ll47t"] Nov 24 01:16:18 crc kubenswrapper[4755]: I1124 01:16:18.015590 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-fdzng" Nov 24 01:16:18 crc kubenswrapper[4755]: I1124 01:16:18.015901 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-fdzng" Nov 24 01:16:18 crc kubenswrapper[4755]: I1124 01:16:18.022515 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-khbxb" Nov 24 01:16:18 crc kubenswrapper[4755]: I1124 01:16:18.054929 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-fdzng" Nov 24 01:16:19 crc kubenswrapper[4755]: I1124 01:16:18.987328 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-ll47t" podUID="601c4d50-2427-46ff-adc3-69c6960d43c2" containerName="registry-server" containerID="cri-o://890eaac30a0fdd773814c7b19a5c7dec1eb0c6be468d72c41fc45382087559a6" gracePeriod=2 Nov 24 01:16:19 crc kubenswrapper[4755]: I1124 01:16:19.019161 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rm9z9" Nov 24 01:16:19 crc kubenswrapper[4755]: I1124 01:16:19.019227 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rm9z9" Nov 24 01:16:19 crc kubenswrapper[4755]: I1124 01:16:19.056068 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-fdzng" Nov 24 01:16:19 crc kubenswrapper[4755]: I1124 01:16:19.056677 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-899vs"] Nov 24 01:16:19 crc kubenswrapper[4755]: I1124 01:16:19.056972 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-899vs" podUID="9a1a2052-a1d2-4b6c-bb90-1c51c13499da" containerName="registry-server" containerID="cri-o://1b19adeb3e7b86f87a84122ef6b4be481a21f33326ec3b4a053b4e84cbe2e67e" gracePeriod=2 Nov 24 01:16:19 crc kubenswrapper[4755]: I1124 01:16:19.404840 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-4879p" Nov 24 01:16:19 crc kubenswrapper[4755]: I1124 01:16:19.404904 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-4879p" Nov 24 01:16:19 crc kubenswrapper[4755]: I1124 01:16:19.440193 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-4879p" Nov 24 01:16:20 crc kubenswrapper[4755]: I1124 01:16:20.037496 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openshift-marketplace/redhat-operators-4879p" Nov 24 01:16:20 crc kubenswrapper[4755]: I1124 01:16:20.058246 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-rm9z9" podUID="1c115205-515a-4f53-96ec-8559f1744b9b" containerName="registry-server" probeResult="failure" output=< Nov 24 01:16:20 crc kubenswrapper[4755]: timeout: failed to connect service ":50051" within 1s Nov 24 01:16:20 crc kubenswrapper[4755]: > Nov 24 01:16:20 crc kubenswrapper[4755]: I1124 01:16:20.554345 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-899vs" Nov 24 01:16:20 crc kubenswrapper[4755]: I1124 01:16:20.694927 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a1a2052-a1d2-4b6c-bb90-1c51c13499da-utilities\") pod \"9a1a2052-a1d2-4b6c-bb90-1c51c13499da\" (UID: \"9a1a2052-a1d2-4b6c-bb90-1c51c13499da\") " Nov 24 01:16:20 crc kubenswrapper[4755]: I1124 01:16:20.695295 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a1a2052-a1d2-4b6c-bb90-1c51c13499da-catalog-content\") pod \"9a1a2052-a1d2-4b6c-bb90-1c51c13499da\" (UID: \"9a1a2052-a1d2-4b6c-bb90-1c51c13499da\") " Nov 24 01:16:20 crc kubenswrapper[4755]: I1124 01:16:20.695416 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n5vxk\" (UniqueName: \"kubernetes.io/projected/9a1a2052-a1d2-4b6c-bb90-1c51c13499da-kube-api-access-n5vxk\") pod \"9a1a2052-a1d2-4b6c-bb90-1c51c13499da\" (UID: \"9a1a2052-a1d2-4b6c-bb90-1c51c13499da\") " Nov 24 01:16:20 crc kubenswrapper[4755]: I1124 01:16:20.695852 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9a1a2052-a1d2-4b6c-bb90-1c51c13499da-utilities" (OuterVolumeSpecName: "utilities") pod "9a1a2052-a1d2-4b6c-bb90-1c51c13499da" (UID: "9a1a2052-a1d2-4b6c-bb90-1c51c13499da"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:16:20 crc kubenswrapper[4755]: I1124 01:16:20.699790 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a1a2052-a1d2-4b6c-bb90-1c51c13499da-kube-api-access-n5vxk" (OuterVolumeSpecName: "kube-api-access-n5vxk") pod "9a1a2052-a1d2-4b6c-bb90-1c51c13499da" (UID: "9a1a2052-a1d2-4b6c-bb90-1c51c13499da"). InnerVolumeSpecName "kube-api-access-n5vxk". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:16:20 crc kubenswrapper[4755]: I1124 01:16:20.752809 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9a1a2052-a1d2-4b6c-bb90-1c51c13499da-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9a1a2052-a1d2-4b6c-bb90-1c51c13499da" (UID: "9a1a2052-a1d2-4b6c-bb90-1c51c13499da"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:16:20 crc kubenswrapper[4755]: I1124 01:16:20.796276 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a1a2052-a1d2-4b6c-bb90-1c51c13499da-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 01:16:20 crc kubenswrapper[4755]: I1124 01:16:20.796322 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a1a2052-a1d2-4b6c-bb90-1c51c13499da-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 01:16:20 crc kubenswrapper[4755]: I1124 01:16:20.796336 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n5vxk\" (UniqueName: \"kubernetes.io/projected/9a1a2052-a1d2-4b6c-bb90-1c51c13499da-kube-api-access-n5vxk\") on node \"crc\" DevicePath \"\"" Nov 24 01:16:21 crc kubenswrapper[4755]: I1124 01:16:21.000463 4755 generic.go:334] "Generic (PLEG): container finished" podID="9a1a2052-a1d2-4b6c-bb90-1c51c13499da" containerID="1b19adeb3e7b86f87a84122ef6b4be481a21f33326ec3b4a053b4e84cbe2e67e" exitCode=0 Nov 24 01:16:21 crc kubenswrapper[4755]: I1124 01:16:21.000509 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-899vs" Nov 24 01:16:21 crc kubenswrapper[4755]: I1124 01:16:21.000527 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-899vs" event={"ID":"9a1a2052-a1d2-4b6c-bb90-1c51c13499da","Type":"ContainerDied","Data":"1b19adeb3e7b86f87a84122ef6b4be481a21f33326ec3b4a053b4e84cbe2e67e"} Nov 24 01:16:21 crc kubenswrapper[4755]: I1124 01:16:21.000945 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-899vs" event={"ID":"9a1a2052-a1d2-4b6c-bb90-1c51c13499da","Type":"ContainerDied","Data":"2c83a83b4c4305d34c480212296cb79d14011c92e7799bd594a9d111ad1458a2"} Nov 24 01:16:21 crc kubenswrapper[4755]: I1124 01:16:21.000963 4755 scope.go:117] "RemoveContainer" containerID="1b19adeb3e7b86f87a84122ef6b4be481a21f33326ec3b4a053b4e84cbe2e67e" Nov 24 01:16:21 crc kubenswrapper[4755]: I1124 01:16:21.003287 4755 generic.go:334] "Generic (PLEG): container finished" podID="601c4d50-2427-46ff-adc3-69c6960d43c2" containerID="890eaac30a0fdd773814c7b19a5c7dec1eb0c6be468d72c41fc45382087559a6" exitCode=0 Nov 24 01:16:21 crc kubenswrapper[4755]: I1124 01:16:21.003955 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ll47t" event={"ID":"601c4d50-2427-46ff-adc3-69c6960d43c2","Type":"ContainerDied","Data":"890eaac30a0fdd773814c7b19a5c7dec1eb0c6be468d72c41fc45382087559a6"} Nov 24 01:16:21 crc kubenswrapper[4755]: I1124 01:16:21.003989 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ll47t" event={"ID":"601c4d50-2427-46ff-adc3-69c6960d43c2","Type":"ContainerDied","Data":"f1e4d297e2cb065dc4363fe1323b77414d9a1559128e3b6aac93fb87dbb3e3ea"} Nov 24 01:16:21 crc kubenswrapper[4755]: I1124 01:16:21.004003 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f1e4d297e2cb065dc4363fe1323b77414d9a1559128e3b6aac93fb87dbb3e3ea" Nov 24 01:16:21 crc kubenswrapper[4755]: I1124 01:16:21.005248 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-ll47t" Nov 24 01:16:21 crc kubenswrapper[4755]: I1124 01:16:21.014794 4755 scope.go:117] "RemoveContainer" containerID="ca2498a83ef6446caada533d5573d091930427281f9364633e7dea96ad195f8d" Nov 24 01:16:21 crc kubenswrapper[4755]: I1124 01:16:21.039263 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-899vs"] Nov 24 01:16:21 crc kubenswrapper[4755]: I1124 01:16:21.044276 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-899vs"] Nov 24 01:16:21 crc kubenswrapper[4755]: I1124 01:16:21.045351 4755 scope.go:117] "RemoveContainer" containerID="f29a6b2c26040eb54bdafc4632714b5877df1476e0bf076283d587c8542cf5a2" Nov 24 01:16:21 crc kubenswrapper[4755]: I1124 01:16:21.066405 4755 scope.go:117] "RemoveContainer" containerID="1b19adeb3e7b86f87a84122ef6b4be481a21f33326ec3b4a053b4e84cbe2e67e" Nov 24 01:16:21 crc kubenswrapper[4755]: E1124 01:16:21.066874 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1b19adeb3e7b86f87a84122ef6b4be481a21f33326ec3b4a053b4e84cbe2e67e\": container with ID starting with 1b19adeb3e7b86f87a84122ef6b4be481a21f33326ec3b4a053b4e84cbe2e67e not found: ID does not exist" containerID="1b19adeb3e7b86f87a84122ef6b4be481a21f33326ec3b4a053b4e84cbe2e67e" Nov 24 01:16:21 crc kubenswrapper[4755]: I1124 01:16:21.066908 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1b19adeb3e7b86f87a84122ef6b4be481a21f33326ec3b4a053b4e84cbe2e67e"} err="failed to get container status \"1b19adeb3e7b86f87a84122ef6b4be481a21f33326ec3b4a053b4e84cbe2e67e\": rpc error: code = NotFound desc = could not find container \"1b19adeb3e7b86f87a84122ef6b4be481a21f33326ec3b4a053b4e84cbe2e67e\": container with ID starting with 1b19adeb3e7b86f87a84122ef6b4be481a21f33326ec3b4a053b4e84cbe2e67e not found: ID does not exist" Nov 24 01:16:21 crc kubenswrapper[4755]: I1124 01:16:21.066936 4755 scope.go:117] "RemoveContainer" containerID="ca2498a83ef6446caada533d5573d091930427281f9364633e7dea96ad195f8d" Nov 24 01:16:21 crc kubenswrapper[4755]: E1124 01:16:21.067316 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca2498a83ef6446caada533d5573d091930427281f9364633e7dea96ad195f8d\": container with ID starting with ca2498a83ef6446caada533d5573d091930427281f9364633e7dea96ad195f8d not found: ID does not exist" containerID="ca2498a83ef6446caada533d5573d091930427281f9364633e7dea96ad195f8d" Nov 24 01:16:21 crc kubenswrapper[4755]: I1124 01:16:21.067335 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca2498a83ef6446caada533d5573d091930427281f9364633e7dea96ad195f8d"} err="failed to get container status \"ca2498a83ef6446caada533d5573d091930427281f9364633e7dea96ad195f8d\": rpc error: code = NotFound desc = could not find container \"ca2498a83ef6446caada533d5573d091930427281f9364633e7dea96ad195f8d\": container with ID starting with ca2498a83ef6446caada533d5573d091930427281f9364633e7dea96ad195f8d not found: ID does not exist" Nov 24 01:16:21 crc kubenswrapper[4755]: I1124 01:16:21.067348 4755 scope.go:117] "RemoveContainer" containerID="f29a6b2c26040eb54bdafc4632714b5877df1476e0bf076283d587c8542cf5a2" Nov 24 01:16:21 crc kubenswrapper[4755]: E1124 01:16:21.067617 4755 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"f29a6b2c26040eb54bdafc4632714b5877df1476e0bf076283d587c8542cf5a2\": container with ID starting with f29a6b2c26040eb54bdafc4632714b5877df1476e0bf076283d587c8542cf5a2 not found: ID does not exist" containerID="f29a6b2c26040eb54bdafc4632714b5877df1476e0bf076283d587c8542cf5a2" Nov 24 01:16:21 crc kubenswrapper[4755]: I1124 01:16:21.067654 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f29a6b2c26040eb54bdafc4632714b5877df1476e0bf076283d587c8542cf5a2"} err="failed to get container status \"f29a6b2c26040eb54bdafc4632714b5877df1476e0bf076283d587c8542cf5a2\": rpc error: code = NotFound desc = could not find container \"f29a6b2c26040eb54bdafc4632714b5877df1476e0bf076283d587c8542cf5a2\": container with ID starting with f29a6b2c26040eb54bdafc4632714b5877df1476e0bf076283d587c8542cf5a2 not found: ID does not exist" Nov 24 01:16:21 crc kubenswrapper[4755]: I1124 01:16:21.099454 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lh4g2\" (UniqueName: \"kubernetes.io/projected/601c4d50-2427-46ff-adc3-69c6960d43c2-kube-api-access-lh4g2\") pod \"601c4d50-2427-46ff-adc3-69c6960d43c2\" (UID: \"601c4d50-2427-46ff-adc3-69c6960d43c2\") " Nov 24 01:16:21 crc kubenswrapper[4755]: I1124 01:16:21.099531 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/601c4d50-2427-46ff-adc3-69c6960d43c2-utilities\") pod \"601c4d50-2427-46ff-adc3-69c6960d43c2\" (UID: \"601c4d50-2427-46ff-adc3-69c6960d43c2\") " Nov 24 01:16:21 crc kubenswrapper[4755]: I1124 01:16:21.099582 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/601c4d50-2427-46ff-adc3-69c6960d43c2-catalog-content\") pod \"601c4d50-2427-46ff-adc3-69c6960d43c2\" (UID: \"601c4d50-2427-46ff-adc3-69c6960d43c2\") " Nov 24 01:16:21 crc kubenswrapper[4755]: I1124 01:16:21.101983 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/601c4d50-2427-46ff-adc3-69c6960d43c2-utilities" (OuterVolumeSpecName: "utilities") pod "601c4d50-2427-46ff-adc3-69c6960d43c2" (UID: "601c4d50-2427-46ff-adc3-69c6960d43c2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:16:21 crc kubenswrapper[4755]: I1124 01:16:21.103324 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/601c4d50-2427-46ff-adc3-69c6960d43c2-kube-api-access-lh4g2" (OuterVolumeSpecName: "kube-api-access-lh4g2") pod "601c4d50-2427-46ff-adc3-69c6960d43c2" (UID: "601c4d50-2427-46ff-adc3-69c6960d43c2"). InnerVolumeSpecName "kube-api-access-lh4g2". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:16:21 crc kubenswrapper[4755]: I1124 01:16:21.146000 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/601c4d50-2427-46ff-adc3-69c6960d43c2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "601c4d50-2427-46ff-adc3-69c6960d43c2" (UID: "601c4d50-2427-46ff-adc3-69c6960d43c2"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:16:21 crc kubenswrapper[4755]: I1124 01:16:21.201567 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/601c4d50-2427-46ff-adc3-69c6960d43c2-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 01:16:21 crc kubenswrapper[4755]: I1124 01:16:21.212024 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/601c4d50-2427-46ff-adc3-69c6960d43c2-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 01:16:21 crc kubenswrapper[4755]: I1124 01:16:21.212086 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lh4g2\" (UniqueName: \"kubernetes.io/projected/601c4d50-2427-46ff-adc3-69c6960d43c2-kube-api-access-lh4g2\") on node \"crc\" DevicePath \"\"" Nov 24 01:16:22 crc kubenswrapper[4755]: I1124 01:16:22.002951 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9a1a2052-a1d2-4b6c-bb90-1c51c13499da" path="/var/lib/kubelet/pods/9a1a2052-a1d2-4b6c-bb90-1c51c13499da/volumes" Nov 24 01:16:22 crc kubenswrapper[4755]: I1124 01:16:22.007666 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ll47t" Nov 24 01:16:22 crc kubenswrapper[4755]: I1124 01:16:22.035498 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ll47t"] Nov 24 01:16:22 crc kubenswrapper[4755]: I1124 01:16:22.038661 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-ll47t"] Nov 24 01:16:23 crc kubenswrapper[4755]: I1124 01:16:23.454729 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4879p"] Nov 24 01:16:23 crc kubenswrapper[4755]: I1124 01:16:23.455235 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-4879p" podUID="78f7ae95-68af-4e1d-8d97-8e675d6d3323" containerName="registry-server" containerID="cri-o://bf185f5036a9c01743b7cb4995ad0251a35c27b2ca74b1ff4261283821a95d82" gracePeriod=2 Nov 24 01:16:23 crc kubenswrapper[4755]: I1124 01:16:23.954316 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 01:16:24 crc kubenswrapper[4755]: I1124 01:16:24.007108 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="601c4d50-2427-46ff-adc3-69c6960d43c2" path="/var/lib/kubelet/pods/601c4d50-2427-46ff-adc3-69c6960d43c2/volumes" Nov 24 01:16:24 crc kubenswrapper[4755]: I1124 01:16:24.518821 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4879p" Nov 24 01:16:24 crc kubenswrapper[4755]: I1124 01:16:24.702974 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78f7ae95-68af-4e1d-8d97-8e675d6d3323-catalog-content\") pod \"78f7ae95-68af-4e1d-8d97-8e675d6d3323\" (UID: \"78f7ae95-68af-4e1d-8d97-8e675d6d3323\") " Nov 24 01:16:24 crc kubenswrapper[4755]: I1124 01:16:24.703147 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pf299\" (UniqueName: \"kubernetes.io/projected/78f7ae95-68af-4e1d-8d97-8e675d6d3323-kube-api-access-pf299\") pod \"78f7ae95-68af-4e1d-8d97-8e675d6d3323\" (UID: \"78f7ae95-68af-4e1d-8d97-8e675d6d3323\") " Nov 24 01:16:24 crc kubenswrapper[4755]: I1124 01:16:24.703296 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78f7ae95-68af-4e1d-8d97-8e675d6d3323-utilities\") pod \"78f7ae95-68af-4e1d-8d97-8e675d6d3323\" (UID: \"78f7ae95-68af-4e1d-8d97-8e675d6d3323\") " Nov 24 01:16:24 crc kubenswrapper[4755]: I1124 01:16:24.704016 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78f7ae95-68af-4e1d-8d97-8e675d6d3323-utilities" (OuterVolumeSpecName: "utilities") pod "78f7ae95-68af-4e1d-8d97-8e675d6d3323" (UID: "78f7ae95-68af-4e1d-8d97-8e675d6d3323"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:16:24 crc kubenswrapper[4755]: I1124 01:16:24.708475 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78f7ae95-68af-4e1d-8d97-8e675d6d3323-kube-api-access-pf299" (OuterVolumeSpecName: "kube-api-access-pf299") pod "78f7ae95-68af-4e1d-8d97-8e675d6d3323" (UID: "78f7ae95-68af-4e1d-8d97-8e675d6d3323"). InnerVolumeSpecName "kube-api-access-pf299". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:16:24 crc kubenswrapper[4755]: I1124 01:16:24.793503 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78f7ae95-68af-4e1d-8d97-8e675d6d3323-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "78f7ae95-68af-4e1d-8d97-8e675d6d3323" (UID: "78f7ae95-68af-4e1d-8d97-8e675d6d3323"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:16:24 crc kubenswrapper[4755]: I1124 01:16:24.804736 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78f7ae95-68af-4e1d-8d97-8e675d6d3323-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 01:16:24 crc kubenswrapper[4755]: I1124 01:16:24.804777 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78f7ae95-68af-4e1d-8d97-8e675d6d3323-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 01:16:24 crc kubenswrapper[4755]: I1124 01:16:24.804793 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pf299\" (UniqueName: \"kubernetes.io/projected/78f7ae95-68af-4e1d-8d97-8e675d6d3323-kube-api-access-pf299\") on node \"crc\" DevicePath \"\"" Nov 24 01:16:25 crc kubenswrapper[4755]: I1124 01:16:25.028846 4755 generic.go:334] "Generic (PLEG): container finished" podID="78f7ae95-68af-4e1d-8d97-8e675d6d3323" containerID="bf185f5036a9c01743b7cb4995ad0251a35c27b2ca74b1ff4261283821a95d82" exitCode=0 Nov 24 01:16:25 crc kubenswrapper[4755]: I1124 01:16:25.028894 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4879p" event={"ID":"78f7ae95-68af-4e1d-8d97-8e675d6d3323","Type":"ContainerDied","Data":"bf185f5036a9c01743b7cb4995ad0251a35c27b2ca74b1ff4261283821a95d82"} Nov 24 01:16:25 crc kubenswrapper[4755]: I1124 01:16:25.028923 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4879p" event={"ID":"78f7ae95-68af-4e1d-8d97-8e675d6d3323","Type":"ContainerDied","Data":"18f3383d7c75f4ebb68756b36dba9a50932f5a52016df256455373c1e9e9acea"} Nov 24 01:16:25 crc kubenswrapper[4755]: I1124 01:16:25.028944 4755 scope.go:117] "RemoveContainer" containerID="bf185f5036a9c01743b7cb4995ad0251a35c27b2ca74b1ff4261283821a95d82" Nov 24 01:16:25 crc kubenswrapper[4755]: I1124 01:16:25.029069 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4879p" Nov 24 01:16:25 crc kubenswrapper[4755]: I1124 01:16:25.062024 4755 scope.go:117] "RemoveContainer" containerID="c2b1cf6905fff85240cb59bbb9aed4c86475ddb15fa9299fd59a989a2b86323a" Nov 24 01:16:25 crc kubenswrapper[4755]: I1124 01:16:25.062409 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4879p"] Nov 24 01:16:25 crc kubenswrapper[4755]: I1124 01:16:25.065843 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-4879p"] Nov 24 01:16:25 crc kubenswrapper[4755]: I1124 01:16:25.076988 4755 scope.go:117] "RemoveContainer" containerID="70b59942e165182a3d66f98900865d20138be2ffc5461bd50e42732ccd934f2d" Nov 24 01:16:25 crc kubenswrapper[4755]: I1124 01:16:25.101759 4755 scope.go:117] "RemoveContainer" containerID="bf185f5036a9c01743b7cb4995ad0251a35c27b2ca74b1ff4261283821a95d82" Nov 24 01:16:25 crc kubenswrapper[4755]: E1124 01:16:25.105642 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bf185f5036a9c01743b7cb4995ad0251a35c27b2ca74b1ff4261283821a95d82\": container with ID starting with bf185f5036a9c01743b7cb4995ad0251a35c27b2ca74b1ff4261283821a95d82 not found: ID does not exist" containerID="bf185f5036a9c01743b7cb4995ad0251a35c27b2ca74b1ff4261283821a95d82" Nov 24 01:16:25 crc kubenswrapper[4755]: I1124 01:16:25.105684 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf185f5036a9c01743b7cb4995ad0251a35c27b2ca74b1ff4261283821a95d82"} err="failed to get container status \"bf185f5036a9c01743b7cb4995ad0251a35c27b2ca74b1ff4261283821a95d82\": rpc error: code = NotFound desc = could not find container \"bf185f5036a9c01743b7cb4995ad0251a35c27b2ca74b1ff4261283821a95d82\": container with ID starting with bf185f5036a9c01743b7cb4995ad0251a35c27b2ca74b1ff4261283821a95d82 not found: ID does not exist" Nov 24 01:16:25 crc kubenswrapper[4755]: I1124 01:16:25.105713 4755 scope.go:117] "RemoveContainer" containerID="c2b1cf6905fff85240cb59bbb9aed4c86475ddb15fa9299fd59a989a2b86323a" Nov 24 01:16:25 crc kubenswrapper[4755]: E1124 01:16:25.106160 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c2b1cf6905fff85240cb59bbb9aed4c86475ddb15fa9299fd59a989a2b86323a\": container with ID starting with c2b1cf6905fff85240cb59bbb9aed4c86475ddb15fa9299fd59a989a2b86323a not found: ID does not exist" containerID="c2b1cf6905fff85240cb59bbb9aed4c86475ddb15fa9299fd59a989a2b86323a" Nov 24 01:16:25 crc kubenswrapper[4755]: I1124 01:16:25.106202 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2b1cf6905fff85240cb59bbb9aed4c86475ddb15fa9299fd59a989a2b86323a"} err="failed to get container status \"c2b1cf6905fff85240cb59bbb9aed4c86475ddb15fa9299fd59a989a2b86323a\": rpc error: code = NotFound desc = could not find container \"c2b1cf6905fff85240cb59bbb9aed4c86475ddb15fa9299fd59a989a2b86323a\": container with ID starting with c2b1cf6905fff85240cb59bbb9aed4c86475ddb15fa9299fd59a989a2b86323a not found: ID does not exist" Nov 24 01:16:25 crc kubenswrapper[4755]: I1124 01:16:25.106231 4755 scope.go:117] "RemoveContainer" containerID="70b59942e165182a3d66f98900865d20138be2ffc5461bd50e42732ccd934f2d" Nov 24 01:16:25 crc kubenswrapper[4755]: E1124 01:16:25.106491 4755 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"70b59942e165182a3d66f98900865d20138be2ffc5461bd50e42732ccd934f2d\": container with ID starting with 70b59942e165182a3d66f98900865d20138be2ffc5461bd50e42732ccd934f2d not found: ID does not exist" containerID="70b59942e165182a3d66f98900865d20138be2ffc5461bd50e42732ccd934f2d" Nov 24 01:16:25 crc kubenswrapper[4755]: I1124 01:16:25.106510 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70b59942e165182a3d66f98900865d20138be2ffc5461bd50e42732ccd934f2d"} err="failed to get container status \"70b59942e165182a3d66f98900865d20138be2ffc5461bd50e42732ccd934f2d\": rpc error: code = NotFound desc = could not find container \"70b59942e165182a3d66f98900865d20138be2ffc5461bd50e42732ccd934f2d\": container with ID starting with 70b59942e165182a3d66f98900865d20138be2ffc5461bd50e42732ccd934f2d not found: ID does not exist" Nov 24 01:16:26 crc kubenswrapper[4755]: I1124 01:16:26.006010 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78f7ae95-68af-4e1d-8d97-8e675d6d3323" path="/var/lib/kubelet/pods/78f7ae95-68af-4e1d-8d97-8e675d6d3323/volumes" Nov 24 01:16:29 crc kubenswrapper[4755]: I1124 01:16:29.060973 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rm9z9" Nov 24 01:16:29 crc kubenswrapper[4755]: I1124 01:16:29.102217 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rm9z9" Nov 24 01:16:33 crc kubenswrapper[4755]: I1124 01:16:33.295132 4755 patch_prober.go:28] interesting pod/machine-config-daemon-h8xzm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 01:16:33 crc kubenswrapper[4755]: I1124 01:16:33.295194 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 01:16:33 crc kubenswrapper[4755]: I1124 01:16:33.295237 4755 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" Nov 24 01:16:33 crc kubenswrapper[4755]: I1124 01:16:33.295772 4755 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"16a2280b76aa63e33f859193da8353df1f8a4014ef51c0ef6350beb5fb217245"} pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 24 01:16:33 crc kubenswrapper[4755]: I1124 01:16:33.295828 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" containerID="cri-o://16a2280b76aa63e33f859193da8353df1f8a4014ef51c0ef6350beb5fb217245" gracePeriod=600 Nov 24 01:16:33 crc kubenswrapper[4755]: I1124 01:16:33.646856 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" 
podUID="2b4041a5-964c-4bd3-8723-a45e5d6ca9be" containerName="oauth-openshift" containerID="cri-o://bfe292cd297c78c786dca981e700c4e7e272ea7c7abcd06fe92e3526f3f5a731" gracePeriod=15 Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.052173 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.084727 4755 generic.go:334] "Generic (PLEG): container finished" podID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerID="16a2280b76aa63e33f859193da8353df1f8a4014ef51c0ef6350beb5fb217245" exitCode=0 Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.084810 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" event={"ID":"b1962128-02a0-46c3-82c2-5055c2aed0b9","Type":"ContainerDied","Data":"16a2280b76aa63e33f859193da8353df1f8a4014ef51c0ef6350beb5fb217245"} Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.084879 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" event={"ID":"b1962128-02a0-46c3-82c2-5055c2aed0b9","Type":"ContainerStarted","Data":"40fb596ee2efd1749e3b689faff00f59f67b4ea23102fca97de18ffa0a4dd608"} Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.086136 4755 generic.go:334] "Generic (PLEG): container finished" podID="2b4041a5-964c-4bd3-8723-a45e5d6ca9be" containerID="bfe292cd297c78c786dca981e700c4e7e272ea7c7abcd06fe92e3526f3f5a731" exitCode=0 Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.086174 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" event={"ID":"2b4041a5-964c-4bd3-8723-a45e5d6ca9be","Type":"ContainerDied","Data":"bfe292cd297c78c786dca981e700c4e7e272ea7c7abcd06fe92e3526f3f5a731"} Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.086183 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.086200 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-pnvtd" event={"ID":"2b4041a5-964c-4bd3-8723-a45e5d6ca9be","Type":"ContainerDied","Data":"5d9e7fac9e75cc784bcb96cd3bb86cd8cdc1d9cd47d3ac30a8a99c676add9262"} Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.086223 4755 scope.go:117] "RemoveContainer" containerID="bfe292cd297c78c786dca981e700c4e7e272ea7c7abcd06fe92e3526f3f5a731" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.092396 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-54bd787995-k4sft"] Nov 24 01:16:34 crc kubenswrapper[4755]: E1124 01:16:34.092721 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78f7ae95-68af-4e1d-8d97-8e675d6d3323" containerName="extract-content" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.092743 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="78f7ae95-68af-4e1d-8d97-8e675d6d3323" containerName="extract-content" Nov 24 01:16:34 crc kubenswrapper[4755]: E1124 01:16:34.092761 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="601c4d50-2427-46ff-adc3-69c6960d43c2" containerName="extract-utilities" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.092769 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="601c4d50-2427-46ff-adc3-69c6960d43c2" containerName="extract-utilities" Nov 24 01:16:34 crc kubenswrapper[4755]: E1124 01:16:34.092780 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a1a2052-a1d2-4b6c-bb90-1c51c13499da" containerName="extract-utilities" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.092788 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a1a2052-a1d2-4b6c-bb90-1c51c13499da" containerName="extract-utilities" Nov 24 01:16:34 crc kubenswrapper[4755]: E1124 01:16:34.092796 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78f7ae95-68af-4e1d-8d97-8e675d6d3323" containerName="extract-utilities" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.092804 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="78f7ae95-68af-4e1d-8d97-8e675d6d3323" containerName="extract-utilities" Nov 24 01:16:34 crc kubenswrapper[4755]: E1124 01:16:34.092812 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d817b373-6b9c-49b1-9918-16279916b86a" containerName="pruner" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.092822 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="d817b373-6b9c-49b1-9918-16279916b86a" containerName="pruner" Nov 24 01:16:34 crc kubenswrapper[4755]: E1124 01:16:34.092831 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a1a2052-a1d2-4b6c-bb90-1c51c13499da" containerName="registry-server" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.092839 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a1a2052-a1d2-4b6c-bb90-1c51c13499da" containerName="registry-server" Nov 24 01:16:34 crc kubenswrapper[4755]: E1124 01:16:34.092851 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="601c4d50-2427-46ff-adc3-69c6960d43c2" containerName="registry-server" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.092859 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="601c4d50-2427-46ff-adc3-69c6960d43c2" containerName="registry-server" Nov 
24 01:16:34 crc kubenswrapper[4755]: E1124 01:16:34.092870 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0f48fab-6749-416a-b712-c1dcb42b45d1" containerName="extract-utilities" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.092877 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0f48fab-6749-416a-b712-c1dcb42b45d1" containerName="extract-utilities" Nov 24 01:16:34 crc kubenswrapper[4755]: E1124 01:16:34.092885 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a1a2052-a1d2-4b6c-bb90-1c51c13499da" containerName="extract-content" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.092893 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a1a2052-a1d2-4b6c-bb90-1c51c13499da" containerName="extract-content" Nov 24 01:16:34 crc kubenswrapper[4755]: E1124 01:16:34.092903 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0f48fab-6749-416a-b712-c1dcb42b45d1" containerName="extract-content" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.092910 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0f48fab-6749-416a-b712-c1dcb42b45d1" containerName="extract-content" Nov 24 01:16:34 crc kubenswrapper[4755]: E1124 01:16:34.092923 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b4041a5-964c-4bd3-8723-a45e5d6ca9be" containerName="oauth-openshift" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.092932 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b4041a5-964c-4bd3-8723-a45e5d6ca9be" containerName="oauth-openshift" Nov 24 01:16:34 crc kubenswrapper[4755]: E1124 01:16:34.092943 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0f48fab-6749-416a-b712-c1dcb42b45d1" containerName="registry-server" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.092950 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0f48fab-6749-416a-b712-c1dcb42b45d1" containerName="registry-server" Nov 24 01:16:34 crc kubenswrapper[4755]: E1124 01:16:34.092963 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78f7ae95-68af-4e1d-8d97-8e675d6d3323" containerName="registry-server" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.092971 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="78f7ae95-68af-4e1d-8d97-8e675d6d3323" containerName="registry-server" Nov 24 01:16:34 crc kubenswrapper[4755]: E1124 01:16:34.092980 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="601c4d50-2427-46ff-adc3-69c6960d43c2" containerName="extract-content" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.092987 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="601c4d50-2427-46ff-adc3-69c6960d43c2" containerName="extract-content" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.093095 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="d817b373-6b9c-49b1-9918-16279916b86a" containerName="pruner" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.093107 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b4041a5-964c-4bd3-8723-a45e5d6ca9be" containerName="oauth-openshift" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.093121 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="78f7ae95-68af-4e1d-8d97-8e675d6d3323" containerName="registry-server" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.093134 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0f48fab-6749-416a-b712-c1dcb42b45d1" 
containerName="registry-server" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.093144 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a1a2052-a1d2-4b6c-bb90-1c51c13499da" containerName="registry-server" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.093153 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="601c4d50-2427-46ff-adc3-69c6960d43c2" containerName="registry-server" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.093647 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.101781 4755 scope.go:117] "RemoveContainer" containerID="bfe292cd297c78c786dca981e700c4e7e272ea7c7abcd06fe92e3526f3f5a731" Nov 24 01:16:34 crc kubenswrapper[4755]: E1124 01:16:34.102332 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bfe292cd297c78c786dca981e700c4e7e272ea7c7abcd06fe92e3526f3f5a731\": container with ID starting with bfe292cd297c78c786dca981e700c4e7e272ea7c7abcd06fe92e3526f3f5a731 not found: ID does not exist" containerID="bfe292cd297c78c786dca981e700c4e7e272ea7c7abcd06fe92e3526f3f5a731" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.102372 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bfe292cd297c78c786dca981e700c4e7e272ea7c7abcd06fe92e3526f3f5a731"} err="failed to get container status \"bfe292cd297c78c786dca981e700c4e7e272ea7c7abcd06fe92e3526f3f5a731\": rpc error: code = NotFound desc = could not find container \"bfe292cd297c78c786dca981e700c4e7e272ea7c7abcd06fe92e3526f3f5a731\": container with ID starting with bfe292cd297c78c786dca981e700c4e7e272ea7c7abcd06fe92e3526f3f5a731 not found: ID does not exist" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.110311 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-54bd787995-k4sft"] Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.183805 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-system-ocp-branding-template\") pod \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.183857 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-system-service-ca\") pod \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.183883 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r5wzj\" (UniqueName: \"kubernetes.io/projected/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-kube-api-access-r5wzj\") pod \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.183908 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-audit-dir\") pod \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\" (UID: 
\"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.183959 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-system-cliconfig\") pod \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.183981 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-system-router-certs\") pod \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.184019 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-user-idp-0-file-data\") pod \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.184050 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-audit-policies\") pod \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.184098 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-user-template-error\") pod \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.184119 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-system-session\") pod \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.184139 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-system-trusted-ca-bundle\") pod \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.184164 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-user-template-provider-selection\") pod \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.184216 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-user-template-login\") pod \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 
01:16:34.184266 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-system-serving-cert\") pod \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\" (UID: \"2b4041a5-964c-4bd3-8723-a45e5d6ca9be\") " Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.184400 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-54bd787995-k4sft\" (UID: \"4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf\") " pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.184427 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf-v4-0-config-user-template-login\") pod \"oauth-openshift-54bd787995-k4sft\" (UID: \"4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf\") " pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.184455 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf-v4-0-config-system-service-ca\") pod \"oauth-openshift-54bd787995-k4sft\" (UID: \"4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf\") " pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.184477 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c79kw\" (UniqueName: \"kubernetes.io/projected/4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf-kube-api-access-c79kw\") pod \"oauth-openshift-54bd787995-k4sft\" (UID: \"4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf\") " pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.184505 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf-audit-policies\") pod \"oauth-openshift-54bd787995-k4sft\" (UID: \"4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf\") " pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.184525 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-54bd787995-k4sft\" (UID: \"4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf\") " pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.184578 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf-audit-dir\") pod \"oauth-openshift-54bd787995-k4sft\" (UID: \"4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf\") " pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 
01:16:34.184598 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-54bd787995-k4sft\" (UID: \"4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf\") " pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.184654 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf-v4-0-config-system-router-certs\") pod \"oauth-openshift-54bd787995-k4sft\" (UID: \"4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf\") " pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.184697 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf-v4-0-config-user-template-error\") pod \"oauth-openshift-54bd787995-k4sft\" (UID: \"4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf\") " pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.184748 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-54bd787995-k4sft\" (UID: \"4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf\") " pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.184778 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf-v4-0-config-system-cliconfig\") pod \"oauth-openshift-54bd787995-k4sft\" (UID: \"4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf\") " pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.184800 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf-v4-0-config-system-session\") pod \"oauth-openshift-54bd787995-k4sft\" (UID: \"4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf\") " pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.184824 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf-v4-0-config-system-serving-cert\") pod \"oauth-openshift-54bd787995-k4sft\" (UID: \"4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf\") " pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.184876 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "2b4041a5-964c-4bd3-8723-a45e5d6ca9be" (UID: 
"2b4041a5-964c-4bd3-8723-a45e5d6ca9be"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.184953 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "2b4041a5-964c-4bd3-8723-a45e5d6ca9be" (UID: "2b4041a5-964c-4bd3-8723-a45e5d6ca9be"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.185266 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "2b4041a5-964c-4bd3-8723-a45e5d6ca9be" (UID: "2b4041a5-964c-4bd3-8723-a45e5d6ca9be"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.185708 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "2b4041a5-964c-4bd3-8723-a45e5d6ca9be" (UID: "2b4041a5-964c-4bd3-8723-a45e5d6ca9be"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.186878 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "2b4041a5-964c-4bd3-8723-a45e5d6ca9be" (UID: "2b4041a5-964c-4bd3-8723-a45e5d6ca9be"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.189874 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "2b4041a5-964c-4bd3-8723-a45e5d6ca9be" (UID: "2b4041a5-964c-4bd3-8723-a45e5d6ca9be"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.190207 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "2b4041a5-964c-4bd3-8723-a45e5d6ca9be" (UID: "2b4041a5-964c-4bd3-8723-a45e5d6ca9be"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.190371 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-kube-api-access-r5wzj" (OuterVolumeSpecName: "kube-api-access-r5wzj") pod "2b4041a5-964c-4bd3-8723-a45e5d6ca9be" (UID: "2b4041a5-964c-4bd3-8723-a45e5d6ca9be"). InnerVolumeSpecName "kube-api-access-r5wzj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.190441 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "2b4041a5-964c-4bd3-8723-a45e5d6ca9be" (UID: "2b4041a5-964c-4bd3-8723-a45e5d6ca9be"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.190507 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "2b4041a5-964c-4bd3-8723-a45e5d6ca9be" (UID: "2b4041a5-964c-4bd3-8723-a45e5d6ca9be"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.190543 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "2b4041a5-964c-4bd3-8723-a45e5d6ca9be" (UID: "2b4041a5-964c-4bd3-8723-a45e5d6ca9be"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.190974 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "2b4041a5-964c-4bd3-8723-a45e5d6ca9be" (UID: "2b4041a5-964c-4bd3-8723-a45e5d6ca9be"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.191254 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "2b4041a5-964c-4bd3-8723-a45e5d6ca9be" (UID: "2b4041a5-964c-4bd3-8723-a45e5d6ca9be"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.191439 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "2b4041a5-964c-4bd3-8723-a45e5d6ca9be" (UID: "2b4041a5-964c-4bd3-8723-a45e5d6ca9be"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.286218 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf-v4-0-config-system-service-ca\") pod \"oauth-openshift-54bd787995-k4sft\" (UID: \"4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf\") " pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.286263 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c79kw\" (UniqueName: \"kubernetes.io/projected/4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf-kube-api-access-c79kw\") pod \"oauth-openshift-54bd787995-k4sft\" (UID: \"4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf\") " pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.286287 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf-audit-policies\") pod \"oauth-openshift-54bd787995-k4sft\" (UID: \"4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf\") " pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.286307 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-54bd787995-k4sft\" (UID: \"4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf\") " pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.286340 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf-audit-dir\") pod \"oauth-openshift-54bd787995-k4sft\" (UID: \"4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf\") " pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.286358 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-54bd787995-k4sft\" (UID: \"4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf\") " pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.286378 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf-v4-0-config-system-router-certs\") pod \"oauth-openshift-54bd787995-k4sft\" (UID: \"4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf\") " pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.286398 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf-v4-0-config-user-template-error\") pod \"oauth-openshift-54bd787995-k4sft\" (UID: \"4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf\") " pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" Nov 24 01:16:34 crc 
kubenswrapper[4755]: I1124 01:16:34.286428 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-54bd787995-k4sft\" (UID: \"4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf\") " pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.286448 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf-v4-0-config-system-cliconfig\") pod \"oauth-openshift-54bd787995-k4sft\" (UID: \"4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf\") " pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.286465 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf-v4-0-config-system-session\") pod \"oauth-openshift-54bd787995-k4sft\" (UID: \"4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf\") " pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.286481 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf-v4-0-config-system-serving-cert\") pod \"oauth-openshift-54bd787995-k4sft\" (UID: \"4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf\") " pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.286499 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-54bd787995-k4sft\" (UID: \"4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf\") " pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.286515 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf-v4-0-config-user-template-login\") pod \"oauth-openshift-54bd787995-k4sft\" (UID: \"4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf\") " pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.286551 4755 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-audit-policies\") on node \"crc\" DevicePath \"\"" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.286562 4755 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.286571 4755 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Nov 24 01:16:34 crc kubenswrapper[4755]: 
I1124 01:16:34.286580 4755 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.286591 4755 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.286616 4755 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.286630 4755 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.286640 4755 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.286649 4755 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.286657 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r5wzj\" (UniqueName: \"kubernetes.io/projected/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-kube-api-access-r5wzj\") on node \"crc\" DevicePath \"\"" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.286668 4755 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-audit-dir\") on node \"crc\" DevicePath \"\"" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.286676 4755 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.286685 4755 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.286693 4755 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/2b4041a5-964c-4bd3-8723-a45e5d6ca9be-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.287307 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf-v4-0-config-system-service-ca\") pod 
\"oauth-openshift-54bd787995-k4sft\" (UID: \"4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf\") " pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.287823 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf-v4-0-config-system-cliconfig\") pod \"oauth-openshift-54bd787995-k4sft\" (UID: \"4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf\") " pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.288349 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf-audit-dir\") pod \"oauth-openshift-54bd787995-k4sft\" (UID: \"4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf\") " pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.288694 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf-audit-policies\") pod \"oauth-openshift-54bd787995-k4sft\" (UID: \"4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf\") " pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.289824 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf-v4-0-config-system-router-certs\") pod \"oauth-openshift-54bd787995-k4sft\" (UID: \"4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf\") " pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.289853 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-54bd787995-k4sft\" (UID: \"4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf\") " pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.290097 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-54bd787995-k4sft\" (UID: \"4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf\") " pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.290717 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-54bd787995-k4sft\" (UID: \"4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf\") " pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.290795 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf-v4-0-config-system-session\") pod \"oauth-openshift-54bd787995-k4sft\" (UID: \"4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf\") " 
pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.290803 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf-v4-0-config-user-template-login\") pod \"oauth-openshift-54bd787995-k4sft\" (UID: \"4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf\") " pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.290921 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-54bd787995-k4sft\" (UID: \"4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf\") " pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.291216 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf-v4-0-config-user-template-error\") pod \"oauth-openshift-54bd787995-k4sft\" (UID: \"4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf\") " pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.291426 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf-v4-0-config-system-serving-cert\") pod \"oauth-openshift-54bd787995-k4sft\" (UID: \"4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf\") " pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.305289 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c79kw\" (UniqueName: \"kubernetes.io/projected/4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf-kube-api-access-c79kw\") pod \"oauth-openshift-54bd787995-k4sft\" (UID: \"4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf\") " pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.405357 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.418086 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-pnvtd"] Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.432211 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-pnvtd"] Nov 24 01:16:34 crc kubenswrapper[4755]: I1124 01:16:34.597749 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-54bd787995-k4sft"] Nov 24 01:16:35 crc kubenswrapper[4755]: I1124 01:16:35.094699 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" event={"ID":"4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf","Type":"ContainerStarted","Data":"99ba91d57be8a45f78f494a5d7c9b6c7131096381ed345980ec46b9add1e0489"} Nov 24 01:16:35 crc kubenswrapper[4755]: I1124 01:16:35.094979 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" event={"ID":"4e1dadd4-f0fe-49a7-bbc3-294e4b9f3ebf","Type":"ContainerStarted","Data":"a82d0441897054d2197dd8a63c0029c7f2cdde0d790907842ef5e954c402ed27"} Nov 24 01:16:35 crc kubenswrapper[4755]: I1124 01:16:35.094994 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" Nov 24 01:16:35 crc kubenswrapper[4755]: I1124 01:16:35.363004 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" Nov 24 01:16:35 crc kubenswrapper[4755]: I1124 01:16:35.379125 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-54bd787995-k4sft" podStartSLOduration=27.379088751 podStartE2EDuration="27.379088751s" podCreationTimestamp="2025-11-24 01:16:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:16:35.120984683 +0000 UTC m=+219.807050194" watchObservedRunningTime="2025-11-24 01:16:35.379088751 +0000 UTC m=+220.065154252" Nov 24 01:16:36 crc kubenswrapper[4755]: I1124 01:16:36.002624 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2b4041a5-964c-4bd3-8723-a45e5d6ca9be" path="/var/lib/kubelet/pods/2b4041a5-964c-4bd3-8723-a45e5d6ca9be/volumes" Nov 24 01:16:50 crc kubenswrapper[4755]: I1124 01:16:50.631358 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-khbxb"] Nov 24 01:16:50 crc kubenswrapper[4755]: I1124 01:16:50.632209 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-khbxb" podUID="b1028f34-b287-4775-a138-1ccdae47b7ee" containerName="registry-server" containerID="cri-o://478d6bcbe108b25feca4d8292c4388fe5d8a242bec251d456afe064e03ac406c" gracePeriod=30 Nov 24 01:16:50 crc kubenswrapper[4755]: I1124 01:16:50.650451 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-57wzn"] Nov 24 01:16:50 crc kubenswrapper[4755]: I1124 01:16:50.650737 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-57wzn" podUID="9806f604-0abe-4de1-af45-78232744bc87" containerName="registry-server" 
containerID="cri-o://997e893021a80ffdc4e0c5a4b5f60dbc24cd129f170bb95771f37fa3b4e35049" gracePeriod=30 Nov 24 01:16:50 crc kubenswrapper[4755]: I1124 01:16:50.654900 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-x78dk"] Nov 24 01:16:50 crc kubenswrapper[4755]: I1124 01:16:50.657456 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-x78dk" podUID="7c537b75-83c8-4250-aae0-cacbdb94445f" containerName="marketplace-operator" containerID="cri-o://46af39ffaa246e5d93b62a6a0cb30b261c05b20d89221eb1375672cbed64a6ab" gracePeriod=30 Nov 24 01:16:50 crc kubenswrapper[4755]: I1124 01:16:50.663407 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fdzng"] Nov 24 01:16:50 crc kubenswrapper[4755]: I1124 01:16:50.663689 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-fdzng" podUID="21501e82-61af-435e-97f5-767cb357cbfb" containerName="registry-server" containerID="cri-o://8784e50b35039b04935df96460c3fab7585bb9a36562098274d4f3545850cf59" gracePeriod=30 Nov 24 01:16:50 crc kubenswrapper[4755]: I1124 01:16:50.670041 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-bs6fj"] Nov 24 01:16:50 crc kubenswrapper[4755]: I1124 01:16:50.671172 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-bs6fj" Nov 24 01:16:50 crc kubenswrapper[4755]: I1124 01:16:50.673223 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rm9z9"] Nov 24 01:16:50 crc kubenswrapper[4755]: I1124 01:16:50.673517 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-rm9z9" podUID="1c115205-515a-4f53-96ec-8559f1744b9b" containerName="registry-server" containerID="cri-o://5bb4131c4cddaa70806b3fddd1600fbe0d2a21c6dad29ab90bfade1b6ed4c423" gracePeriod=30 Nov 24 01:16:50 crc kubenswrapper[4755]: I1124 01:16:50.723946 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-bs6fj"] Nov 24 01:16:50 crc kubenswrapper[4755]: I1124 01:16:50.804750 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/be6e8d7e-1c19-449b-a7f5-c104a92edf7c-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-bs6fj\" (UID: \"be6e8d7e-1c19-449b-a7f5-c104a92edf7c\") " pod="openshift-marketplace/marketplace-operator-79b997595-bs6fj" Nov 24 01:16:50 crc kubenswrapper[4755]: I1124 01:16:50.804856 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/be6e8d7e-1c19-449b-a7f5-c104a92edf7c-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-bs6fj\" (UID: \"be6e8d7e-1c19-449b-a7f5-c104a92edf7c\") " pod="openshift-marketplace/marketplace-operator-79b997595-bs6fj" Nov 24 01:16:50 crc kubenswrapper[4755]: I1124 01:16:50.804915 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qdr8b\" (UniqueName: \"kubernetes.io/projected/be6e8d7e-1c19-449b-a7f5-c104a92edf7c-kube-api-access-qdr8b\") pod 
\"marketplace-operator-79b997595-bs6fj\" (UID: \"be6e8d7e-1c19-449b-a7f5-c104a92edf7c\") " pod="openshift-marketplace/marketplace-operator-79b997595-bs6fj" Nov 24 01:16:50 crc kubenswrapper[4755]: I1124 01:16:50.905779 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qdr8b\" (UniqueName: \"kubernetes.io/projected/be6e8d7e-1c19-449b-a7f5-c104a92edf7c-kube-api-access-qdr8b\") pod \"marketplace-operator-79b997595-bs6fj\" (UID: \"be6e8d7e-1c19-449b-a7f5-c104a92edf7c\") " pod="openshift-marketplace/marketplace-operator-79b997595-bs6fj" Nov 24 01:16:50 crc kubenswrapper[4755]: I1124 01:16:50.905877 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/be6e8d7e-1c19-449b-a7f5-c104a92edf7c-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-bs6fj\" (UID: \"be6e8d7e-1c19-449b-a7f5-c104a92edf7c\") " pod="openshift-marketplace/marketplace-operator-79b997595-bs6fj" Nov 24 01:16:50 crc kubenswrapper[4755]: I1124 01:16:50.905918 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/be6e8d7e-1c19-449b-a7f5-c104a92edf7c-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-bs6fj\" (UID: \"be6e8d7e-1c19-449b-a7f5-c104a92edf7c\") " pod="openshift-marketplace/marketplace-operator-79b997595-bs6fj" Nov 24 01:16:50 crc kubenswrapper[4755]: I1124 01:16:50.907553 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/be6e8d7e-1c19-449b-a7f5-c104a92edf7c-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-bs6fj\" (UID: \"be6e8d7e-1c19-449b-a7f5-c104a92edf7c\") " pod="openshift-marketplace/marketplace-operator-79b997595-bs6fj" Nov 24 01:16:50 crc kubenswrapper[4755]: I1124 01:16:50.922384 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/be6e8d7e-1c19-449b-a7f5-c104a92edf7c-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-bs6fj\" (UID: \"be6e8d7e-1c19-449b-a7f5-c104a92edf7c\") " pod="openshift-marketplace/marketplace-operator-79b997595-bs6fj" Nov 24 01:16:50 crc kubenswrapper[4755]: I1124 01:16:50.924896 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qdr8b\" (UniqueName: \"kubernetes.io/projected/be6e8d7e-1c19-449b-a7f5-c104a92edf7c-kube-api-access-qdr8b\") pod \"marketplace-operator-79b997595-bs6fj\" (UID: \"be6e8d7e-1c19-449b-a7f5-c104a92edf7c\") " pod="openshift-marketplace/marketplace-operator-79b997595-bs6fj" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.029964 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-bs6fj" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.048739 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-khbxb" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.110528 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-57wzn" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.131398 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-x78dk" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.132350 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rm9z9" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.139736 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fdzng" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.212203 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9806f604-0abe-4de1-af45-78232744bc87-catalog-content\") pod \"9806f604-0abe-4de1-af45-78232744bc87\" (UID: \"9806f604-0abe-4de1-af45-78232744bc87\") " Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.212291 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-djxdq\" (UniqueName: \"kubernetes.io/projected/9806f604-0abe-4de1-af45-78232744bc87-kube-api-access-djxdq\") pod \"9806f604-0abe-4de1-af45-78232744bc87\" (UID: \"9806f604-0abe-4de1-af45-78232744bc87\") " Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.212313 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9806f604-0abe-4de1-af45-78232744bc87-utilities\") pod \"9806f604-0abe-4de1-af45-78232744bc87\" (UID: \"9806f604-0abe-4de1-af45-78232744bc87\") " Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.212343 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b1028f34-b287-4775-a138-1ccdae47b7ee-catalog-content\") pod \"b1028f34-b287-4775-a138-1ccdae47b7ee\" (UID: \"b1028f34-b287-4775-a138-1ccdae47b7ee\") " Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.212377 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b1028f34-b287-4775-a138-1ccdae47b7ee-utilities\") pod \"b1028f34-b287-4775-a138-1ccdae47b7ee\" (UID: \"b1028f34-b287-4775-a138-1ccdae47b7ee\") " Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.212417 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qfrt6\" (UniqueName: \"kubernetes.io/projected/b1028f34-b287-4775-a138-1ccdae47b7ee-kube-api-access-qfrt6\") pod \"b1028f34-b287-4775-a138-1ccdae47b7ee\" (UID: \"b1028f34-b287-4775-a138-1ccdae47b7ee\") " Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.214360 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9806f604-0abe-4de1-af45-78232744bc87-utilities" (OuterVolumeSpecName: "utilities") pod "9806f604-0abe-4de1-af45-78232744bc87" (UID: "9806f604-0abe-4de1-af45-78232744bc87"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.215007 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b1028f34-b287-4775-a138-1ccdae47b7ee-utilities" (OuterVolumeSpecName: "utilities") pod "b1028f34-b287-4775-a138-1ccdae47b7ee" (UID: "b1028f34-b287-4775-a138-1ccdae47b7ee"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.218588 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9806f604-0abe-4de1-af45-78232744bc87-kube-api-access-djxdq" (OuterVolumeSpecName: "kube-api-access-djxdq") pod "9806f604-0abe-4de1-af45-78232744bc87" (UID: "9806f604-0abe-4de1-af45-78232744bc87"). InnerVolumeSpecName "kube-api-access-djxdq". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.218673 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1028f34-b287-4775-a138-1ccdae47b7ee-kube-api-access-qfrt6" (OuterVolumeSpecName: "kube-api-access-qfrt6") pod "b1028f34-b287-4775-a138-1ccdae47b7ee" (UID: "b1028f34-b287-4775-a138-1ccdae47b7ee"). InnerVolumeSpecName "kube-api-access-qfrt6". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.269311 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9806f604-0abe-4de1-af45-78232744bc87-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9806f604-0abe-4de1-af45-78232744bc87" (UID: "9806f604-0abe-4de1-af45-78232744bc87"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.269943 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b1028f34-b287-4775-a138-1ccdae47b7ee-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b1028f34-b287-4775-a138-1ccdae47b7ee" (UID: "b1028f34-b287-4775-a138-1ccdae47b7ee"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.314093 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/7c537b75-83c8-4250-aae0-cacbdb94445f-marketplace-operator-metrics\") pod \"7c537b75-83c8-4250-aae0-cacbdb94445f\" (UID: \"7c537b75-83c8-4250-aae0-cacbdb94445f\") " Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.314150 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7c537b75-83c8-4250-aae0-cacbdb94445f-marketplace-trusted-ca\") pod \"7c537b75-83c8-4250-aae0-cacbdb94445f\" (UID: \"7c537b75-83c8-4250-aae0-cacbdb94445f\") " Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.314186 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7phb4\" (UniqueName: \"kubernetes.io/projected/1c115205-515a-4f53-96ec-8559f1744b9b-kube-api-access-7phb4\") pod \"1c115205-515a-4f53-96ec-8559f1744b9b\" (UID: \"1c115205-515a-4f53-96ec-8559f1744b9b\") " Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.314210 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/21501e82-61af-435e-97f5-767cb357cbfb-utilities\") pod \"21501e82-61af-435e-97f5-767cb357cbfb\" (UID: \"21501e82-61af-435e-97f5-767cb357cbfb\") " Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.314241 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sdvcj\" (UniqueName: \"kubernetes.io/projected/7c537b75-83c8-4250-aae0-cacbdb94445f-kube-api-access-sdvcj\") pod \"7c537b75-83c8-4250-aae0-cacbdb94445f\" (UID: \"7c537b75-83c8-4250-aae0-cacbdb94445f\") " Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.314269 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c115205-515a-4f53-96ec-8559f1744b9b-catalog-content\") pod \"1c115205-515a-4f53-96ec-8559f1744b9b\" (UID: \"1c115205-515a-4f53-96ec-8559f1744b9b\") " Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.314300 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c115205-515a-4f53-96ec-8559f1744b9b-utilities\") pod \"1c115205-515a-4f53-96ec-8559f1744b9b\" (UID: \"1c115205-515a-4f53-96ec-8559f1744b9b\") " Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.314318 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/21501e82-61af-435e-97f5-767cb357cbfb-catalog-content\") pod \"21501e82-61af-435e-97f5-767cb357cbfb\" (UID: \"21501e82-61af-435e-97f5-767cb357cbfb\") " Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.314354 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t56kr\" (UniqueName: \"kubernetes.io/projected/21501e82-61af-435e-97f5-767cb357cbfb-kube-api-access-t56kr\") pod \"21501e82-61af-435e-97f5-767cb357cbfb\" (UID: \"21501e82-61af-435e-97f5-767cb357cbfb\") " Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.314571 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b1028f34-b287-4775-a138-1ccdae47b7ee-catalog-content\") on node 
\"crc\" DevicePath \"\"" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.314583 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b1028f34-b287-4775-a138-1ccdae47b7ee-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.314595 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qfrt6\" (UniqueName: \"kubernetes.io/projected/b1028f34-b287-4775-a138-1ccdae47b7ee-kube-api-access-qfrt6\") on node \"crc\" DevicePath \"\"" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.314625 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9806f604-0abe-4de1-af45-78232744bc87-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.314634 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-djxdq\" (UniqueName: \"kubernetes.io/projected/9806f604-0abe-4de1-af45-78232744bc87-kube-api-access-djxdq\") on node \"crc\" DevicePath \"\"" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.314643 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9806f604-0abe-4de1-af45-78232744bc87-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.315943 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7c537b75-83c8-4250-aae0-cacbdb94445f-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "7c537b75-83c8-4250-aae0-cacbdb94445f" (UID: "7c537b75-83c8-4250-aae0-cacbdb94445f"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.316041 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/21501e82-61af-435e-97f5-767cb357cbfb-utilities" (OuterVolumeSpecName: "utilities") pod "21501e82-61af-435e-97f5-767cb357cbfb" (UID: "21501e82-61af-435e-97f5-767cb357cbfb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.316198 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c115205-515a-4f53-96ec-8559f1744b9b-utilities" (OuterVolumeSpecName: "utilities") pod "1c115205-515a-4f53-96ec-8559f1744b9b" (UID: "1c115205-515a-4f53-96ec-8559f1744b9b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.317966 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c537b75-83c8-4250-aae0-cacbdb94445f-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "7c537b75-83c8-4250-aae0-cacbdb94445f" (UID: "7c537b75-83c8-4250-aae0-cacbdb94445f"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.318036 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c115205-515a-4f53-96ec-8559f1744b9b-kube-api-access-7phb4" (OuterVolumeSpecName: "kube-api-access-7phb4") pod "1c115205-515a-4f53-96ec-8559f1744b9b" (UID: "1c115205-515a-4f53-96ec-8559f1744b9b"). 
InnerVolumeSpecName "kube-api-access-7phb4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.318072 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c537b75-83c8-4250-aae0-cacbdb94445f-kube-api-access-sdvcj" (OuterVolumeSpecName: "kube-api-access-sdvcj") pod "7c537b75-83c8-4250-aae0-cacbdb94445f" (UID: "7c537b75-83c8-4250-aae0-cacbdb94445f"). InnerVolumeSpecName "kube-api-access-sdvcj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.318447 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21501e82-61af-435e-97f5-767cb357cbfb-kube-api-access-t56kr" (OuterVolumeSpecName: "kube-api-access-t56kr") pod "21501e82-61af-435e-97f5-767cb357cbfb" (UID: "21501e82-61af-435e-97f5-767cb357cbfb"). InnerVolumeSpecName "kube-api-access-t56kr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.332141 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/21501e82-61af-435e-97f5-767cb357cbfb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "21501e82-61af-435e-97f5-767cb357cbfb" (UID: "21501e82-61af-435e-97f5-767cb357cbfb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.366513 4755 generic.go:334] "Generic (PLEG): container finished" podID="1c115205-515a-4f53-96ec-8559f1744b9b" containerID="5bb4131c4cddaa70806b3fddd1600fbe0d2a21c6dad29ab90bfade1b6ed4c423" exitCode=0 Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.366563 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rm9z9" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.366581 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rm9z9" event={"ID":"1c115205-515a-4f53-96ec-8559f1744b9b","Type":"ContainerDied","Data":"5bb4131c4cddaa70806b3fddd1600fbe0d2a21c6dad29ab90bfade1b6ed4c423"} Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.366643 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rm9z9" event={"ID":"1c115205-515a-4f53-96ec-8559f1744b9b","Type":"ContainerDied","Data":"1976d6b561a993d495abb231fe3e74fc12ea48be71a316524125b5f9fd22e681"} Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.366663 4755 scope.go:117] "RemoveContainer" containerID="5bb4131c4cddaa70806b3fddd1600fbe0d2a21c6dad29ab90bfade1b6ed4c423" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.367587 4755 generic.go:334] "Generic (PLEG): container finished" podID="7c537b75-83c8-4250-aae0-cacbdb94445f" containerID="46af39ffaa246e5d93b62a6a0cb30b261c05b20d89221eb1375672cbed64a6ab" exitCode=0 Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.367649 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-x78dk" event={"ID":"7c537b75-83c8-4250-aae0-cacbdb94445f","Type":"ContainerDied","Data":"46af39ffaa246e5d93b62a6a0cb30b261c05b20d89221eb1375672cbed64a6ab"} Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.367687 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-x78dk" event={"ID":"7c537b75-83c8-4250-aae0-cacbdb94445f","Type":"ContainerDied","Data":"1b304e9a19313c3668d09b2aeae6e6f78fb2f043ff3ae5ba8b979e474877ffba"} Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.367745 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-x78dk" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.371089 4755 generic.go:334] "Generic (PLEG): container finished" podID="21501e82-61af-435e-97f5-767cb357cbfb" containerID="8784e50b35039b04935df96460c3fab7585bb9a36562098274d4f3545850cf59" exitCode=0 Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.371151 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fdzng" event={"ID":"21501e82-61af-435e-97f5-767cb357cbfb","Type":"ContainerDied","Data":"8784e50b35039b04935df96460c3fab7585bb9a36562098274d4f3545850cf59"} Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.371180 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fdzng" event={"ID":"21501e82-61af-435e-97f5-767cb357cbfb","Type":"ContainerDied","Data":"79126ab9b84e434e46e2c88609ad7bbe2b998ea393eccb7db51a806185eff09c"} Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.371243 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fdzng" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.381738 4755 generic.go:334] "Generic (PLEG): container finished" podID="9806f604-0abe-4de1-af45-78232744bc87" containerID="997e893021a80ffdc4e0c5a4b5f60dbc24cd129f170bb95771f37fa3b4e35049" exitCode=0 Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.381839 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-57wzn" event={"ID":"9806f604-0abe-4de1-af45-78232744bc87","Type":"ContainerDied","Data":"997e893021a80ffdc4e0c5a4b5f60dbc24cd129f170bb95771f37fa3b4e35049"} Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.381901 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-57wzn" event={"ID":"9806f604-0abe-4de1-af45-78232744bc87","Type":"ContainerDied","Data":"b2a71abbfd85fbe88e9032b647b8ec4cb7f7a72be2e3cf44eb861e1334e25fc6"} Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.381861 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-57wzn" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.384648 4755 generic.go:334] "Generic (PLEG): container finished" podID="b1028f34-b287-4775-a138-1ccdae47b7ee" containerID="478d6bcbe108b25feca4d8292c4388fe5d8a242bec251d456afe064e03ac406c" exitCode=0 Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.384689 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-khbxb" event={"ID":"b1028f34-b287-4775-a138-1ccdae47b7ee","Type":"ContainerDied","Data":"478d6bcbe108b25feca4d8292c4388fe5d8a242bec251d456afe064e03ac406c"} Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.384740 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-khbxb" event={"ID":"b1028f34-b287-4775-a138-1ccdae47b7ee","Type":"ContainerDied","Data":"1f8b7990f781049540a44ffed8399b00a9cdbdc6a5c880072e2158a2fce8643b"} Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.384692 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-khbxb" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.398620 4755 scope.go:117] "RemoveContainer" containerID="bd5d495c9057cc5861429bf3544ec1476f4d3e0997fc2567bb790352b1bba648" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.401698 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c115205-515a-4f53-96ec-8559f1744b9b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1c115205-515a-4f53-96ec-8559f1744b9b" (UID: "1c115205-515a-4f53-96ec-8559f1744b9b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.402202 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-x78dk"] Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.404620 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-x78dk"] Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.413961 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fdzng"] Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.415560 4755 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/7c537b75-83c8-4250-aae0-cacbdb94445f-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.415589 4755 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7c537b75-83c8-4250-aae0-cacbdb94445f-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.415616 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/21501e82-61af-435e-97f5-767cb357cbfb-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.415629 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7phb4\" (UniqueName: \"kubernetes.io/projected/1c115205-515a-4f53-96ec-8559f1744b9b-kube-api-access-7phb4\") on node \"crc\" DevicePath \"\"" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.415642 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sdvcj\" (UniqueName: \"kubernetes.io/projected/7c537b75-83c8-4250-aae0-cacbdb94445f-kube-api-access-sdvcj\") on node \"crc\" DevicePath \"\"" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.415653 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c115205-515a-4f53-96ec-8559f1744b9b-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.415664 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c115205-515a-4f53-96ec-8559f1744b9b-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.415673 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/21501e82-61af-435e-97f5-767cb357cbfb-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.415684 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t56kr\" (UniqueName: \"kubernetes.io/projected/21501e82-61af-435e-97f5-767cb357cbfb-kube-api-access-t56kr\") on node \"crc\" DevicePath \"\"" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.415943 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-fdzng"] Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.425630 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-khbxb"] Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.428211 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openshift-marketplace/certified-operators-khbxb"] Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.442025 4755 scope.go:117] "RemoveContainer" containerID="67824b752acc0dd67906752026d1c047e9dc681a6437dae9eb8c6513a1d8e30e" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.448614 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-57wzn"] Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.450776 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-57wzn"] Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.455483 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-bs6fj"] Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.458811 4755 scope.go:117] "RemoveContainer" containerID="5bb4131c4cddaa70806b3fddd1600fbe0d2a21c6dad29ab90bfade1b6ed4c423" Nov 24 01:16:51 crc kubenswrapper[4755]: E1124 01:16:51.459246 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5bb4131c4cddaa70806b3fddd1600fbe0d2a21c6dad29ab90bfade1b6ed4c423\": container with ID starting with 5bb4131c4cddaa70806b3fddd1600fbe0d2a21c6dad29ab90bfade1b6ed4c423 not found: ID does not exist" containerID="5bb4131c4cddaa70806b3fddd1600fbe0d2a21c6dad29ab90bfade1b6ed4c423" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.459281 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5bb4131c4cddaa70806b3fddd1600fbe0d2a21c6dad29ab90bfade1b6ed4c423"} err="failed to get container status \"5bb4131c4cddaa70806b3fddd1600fbe0d2a21c6dad29ab90bfade1b6ed4c423\": rpc error: code = NotFound desc = could not find container \"5bb4131c4cddaa70806b3fddd1600fbe0d2a21c6dad29ab90bfade1b6ed4c423\": container with ID starting with 5bb4131c4cddaa70806b3fddd1600fbe0d2a21c6dad29ab90bfade1b6ed4c423 not found: ID does not exist" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.459307 4755 scope.go:117] "RemoveContainer" containerID="bd5d495c9057cc5861429bf3544ec1476f4d3e0997fc2567bb790352b1bba648" Nov 24 01:16:51 crc kubenswrapper[4755]: E1124 01:16:51.459898 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd5d495c9057cc5861429bf3544ec1476f4d3e0997fc2567bb790352b1bba648\": container with ID starting with bd5d495c9057cc5861429bf3544ec1476f4d3e0997fc2567bb790352b1bba648 not found: ID does not exist" containerID="bd5d495c9057cc5861429bf3544ec1476f4d3e0997fc2567bb790352b1bba648" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.459935 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd5d495c9057cc5861429bf3544ec1476f4d3e0997fc2567bb790352b1bba648"} err="failed to get container status \"bd5d495c9057cc5861429bf3544ec1476f4d3e0997fc2567bb790352b1bba648\": rpc error: code = NotFound desc = could not find container \"bd5d495c9057cc5861429bf3544ec1476f4d3e0997fc2567bb790352b1bba648\": container with ID starting with bd5d495c9057cc5861429bf3544ec1476f4d3e0997fc2567bb790352b1bba648 not found: ID does not exist" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.459964 4755 scope.go:117] "RemoveContainer" containerID="67824b752acc0dd67906752026d1c047e9dc681a6437dae9eb8c6513a1d8e30e" Nov 24 01:16:51 crc kubenswrapper[4755]: E1124 01:16:51.460238 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc 
error: code = NotFound desc = could not find container \"67824b752acc0dd67906752026d1c047e9dc681a6437dae9eb8c6513a1d8e30e\": container with ID starting with 67824b752acc0dd67906752026d1c047e9dc681a6437dae9eb8c6513a1d8e30e not found: ID does not exist" containerID="67824b752acc0dd67906752026d1c047e9dc681a6437dae9eb8c6513a1d8e30e" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.460264 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67824b752acc0dd67906752026d1c047e9dc681a6437dae9eb8c6513a1d8e30e"} err="failed to get container status \"67824b752acc0dd67906752026d1c047e9dc681a6437dae9eb8c6513a1d8e30e\": rpc error: code = NotFound desc = could not find container \"67824b752acc0dd67906752026d1c047e9dc681a6437dae9eb8c6513a1d8e30e\": container with ID starting with 67824b752acc0dd67906752026d1c047e9dc681a6437dae9eb8c6513a1d8e30e not found: ID does not exist" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.460283 4755 scope.go:117] "RemoveContainer" containerID="46af39ffaa246e5d93b62a6a0cb30b261c05b20d89221eb1375672cbed64a6ab" Nov 24 01:16:51 crc kubenswrapper[4755]: W1124 01:16:51.462470 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbe6e8d7e_1c19_449b_a7f5_c104a92edf7c.slice/crio-7e6ae2c496a184fbf8ab44d8bd9a1079645da94b5ca0197bd0bb250b1e8c61b3 WatchSource:0}: Error finding container 7e6ae2c496a184fbf8ab44d8bd9a1079645da94b5ca0197bd0bb250b1e8c61b3: Status 404 returned error can't find the container with id 7e6ae2c496a184fbf8ab44d8bd9a1079645da94b5ca0197bd0bb250b1e8c61b3 Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.475276 4755 scope.go:117] "RemoveContainer" containerID="46af39ffaa246e5d93b62a6a0cb30b261c05b20d89221eb1375672cbed64a6ab" Nov 24 01:16:51 crc kubenswrapper[4755]: E1124 01:16:51.475680 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"46af39ffaa246e5d93b62a6a0cb30b261c05b20d89221eb1375672cbed64a6ab\": container with ID starting with 46af39ffaa246e5d93b62a6a0cb30b261c05b20d89221eb1375672cbed64a6ab not found: ID does not exist" containerID="46af39ffaa246e5d93b62a6a0cb30b261c05b20d89221eb1375672cbed64a6ab" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.475720 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46af39ffaa246e5d93b62a6a0cb30b261c05b20d89221eb1375672cbed64a6ab"} err="failed to get container status \"46af39ffaa246e5d93b62a6a0cb30b261c05b20d89221eb1375672cbed64a6ab\": rpc error: code = NotFound desc = could not find container \"46af39ffaa246e5d93b62a6a0cb30b261c05b20d89221eb1375672cbed64a6ab\": container with ID starting with 46af39ffaa246e5d93b62a6a0cb30b261c05b20d89221eb1375672cbed64a6ab not found: ID does not exist" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.475744 4755 scope.go:117] "RemoveContainer" containerID="8784e50b35039b04935df96460c3fab7585bb9a36562098274d4f3545850cf59" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.489705 4755 scope.go:117] "RemoveContainer" containerID="f823c00279633f3f72d896f8fb87d5291de357b9a3b970f20be98e2a6ad3cf1d" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.501194 4755 scope.go:117] "RemoveContainer" containerID="60c06db75637535737eefec23338f4be773b19212290af9fcd95e6496750a915" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.524301 4755 scope.go:117] "RemoveContainer" 
containerID="8784e50b35039b04935df96460c3fab7585bb9a36562098274d4f3545850cf59" Nov 24 01:16:51 crc kubenswrapper[4755]: E1124 01:16:51.524662 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8784e50b35039b04935df96460c3fab7585bb9a36562098274d4f3545850cf59\": container with ID starting with 8784e50b35039b04935df96460c3fab7585bb9a36562098274d4f3545850cf59 not found: ID does not exist" containerID="8784e50b35039b04935df96460c3fab7585bb9a36562098274d4f3545850cf59" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.524702 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8784e50b35039b04935df96460c3fab7585bb9a36562098274d4f3545850cf59"} err="failed to get container status \"8784e50b35039b04935df96460c3fab7585bb9a36562098274d4f3545850cf59\": rpc error: code = NotFound desc = could not find container \"8784e50b35039b04935df96460c3fab7585bb9a36562098274d4f3545850cf59\": container with ID starting with 8784e50b35039b04935df96460c3fab7585bb9a36562098274d4f3545850cf59 not found: ID does not exist" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.524728 4755 scope.go:117] "RemoveContainer" containerID="f823c00279633f3f72d896f8fb87d5291de357b9a3b970f20be98e2a6ad3cf1d" Nov 24 01:16:51 crc kubenswrapper[4755]: E1124 01:16:51.525046 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f823c00279633f3f72d896f8fb87d5291de357b9a3b970f20be98e2a6ad3cf1d\": container with ID starting with f823c00279633f3f72d896f8fb87d5291de357b9a3b970f20be98e2a6ad3cf1d not found: ID does not exist" containerID="f823c00279633f3f72d896f8fb87d5291de357b9a3b970f20be98e2a6ad3cf1d" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.525104 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f823c00279633f3f72d896f8fb87d5291de357b9a3b970f20be98e2a6ad3cf1d"} err="failed to get container status \"f823c00279633f3f72d896f8fb87d5291de357b9a3b970f20be98e2a6ad3cf1d\": rpc error: code = NotFound desc = could not find container \"f823c00279633f3f72d896f8fb87d5291de357b9a3b970f20be98e2a6ad3cf1d\": container with ID starting with f823c00279633f3f72d896f8fb87d5291de357b9a3b970f20be98e2a6ad3cf1d not found: ID does not exist" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.525135 4755 scope.go:117] "RemoveContainer" containerID="60c06db75637535737eefec23338f4be773b19212290af9fcd95e6496750a915" Nov 24 01:16:51 crc kubenswrapper[4755]: E1124 01:16:51.525589 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"60c06db75637535737eefec23338f4be773b19212290af9fcd95e6496750a915\": container with ID starting with 60c06db75637535737eefec23338f4be773b19212290af9fcd95e6496750a915 not found: ID does not exist" containerID="60c06db75637535737eefec23338f4be773b19212290af9fcd95e6496750a915" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.525633 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"60c06db75637535737eefec23338f4be773b19212290af9fcd95e6496750a915"} err="failed to get container status \"60c06db75637535737eefec23338f4be773b19212290af9fcd95e6496750a915\": rpc error: code = NotFound desc = could not find container \"60c06db75637535737eefec23338f4be773b19212290af9fcd95e6496750a915\": container with ID starting with 
60c06db75637535737eefec23338f4be773b19212290af9fcd95e6496750a915 not found: ID does not exist" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.525651 4755 scope.go:117] "RemoveContainer" containerID="997e893021a80ffdc4e0c5a4b5f60dbc24cd129f170bb95771f37fa3b4e35049" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.540797 4755 scope.go:117] "RemoveContainer" containerID="0d12845b64ea6b73edd3701a08ab5e49ba048ad3523f1ef8b990777fe23911d9" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.557817 4755 scope.go:117] "RemoveContainer" containerID="e88f092a227be01624d30cf2bac69784b424a7e86cc6e00992ba2fff644ecb84" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.572333 4755 scope.go:117] "RemoveContainer" containerID="997e893021a80ffdc4e0c5a4b5f60dbc24cd129f170bb95771f37fa3b4e35049" Nov 24 01:16:51 crc kubenswrapper[4755]: E1124 01:16:51.572947 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"997e893021a80ffdc4e0c5a4b5f60dbc24cd129f170bb95771f37fa3b4e35049\": container with ID starting with 997e893021a80ffdc4e0c5a4b5f60dbc24cd129f170bb95771f37fa3b4e35049 not found: ID does not exist" containerID="997e893021a80ffdc4e0c5a4b5f60dbc24cd129f170bb95771f37fa3b4e35049" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.572987 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"997e893021a80ffdc4e0c5a4b5f60dbc24cd129f170bb95771f37fa3b4e35049"} err="failed to get container status \"997e893021a80ffdc4e0c5a4b5f60dbc24cd129f170bb95771f37fa3b4e35049\": rpc error: code = NotFound desc = could not find container \"997e893021a80ffdc4e0c5a4b5f60dbc24cd129f170bb95771f37fa3b4e35049\": container with ID starting with 997e893021a80ffdc4e0c5a4b5f60dbc24cd129f170bb95771f37fa3b4e35049 not found: ID does not exist" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.573016 4755 scope.go:117] "RemoveContainer" containerID="0d12845b64ea6b73edd3701a08ab5e49ba048ad3523f1ef8b990777fe23911d9" Nov 24 01:16:51 crc kubenswrapper[4755]: E1124 01:16:51.574988 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0d12845b64ea6b73edd3701a08ab5e49ba048ad3523f1ef8b990777fe23911d9\": container with ID starting with 0d12845b64ea6b73edd3701a08ab5e49ba048ad3523f1ef8b990777fe23911d9 not found: ID does not exist" containerID="0d12845b64ea6b73edd3701a08ab5e49ba048ad3523f1ef8b990777fe23911d9" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.575101 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0d12845b64ea6b73edd3701a08ab5e49ba048ad3523f1ef8b990777fe23911d9"} err="failed to get container status \"0d12845b64ea6b73edd3701a08ab5e49ba048ad3523f1ef8b990777fe23911d9\": rpc error: code = NotFound desc = could not find container \"0d12845b64ea6b73edd3701a08ab5e49ba048ad3523f1ef8b990777fe23911d9\": container with ID starting with 0d12845b64ea6b73edd3701a08ab5e49ba048ad3523f1ef8b990777fe23911d9 not found: ID does not exist" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.575132 4755 scope.go:117] "RemoveContainer" containerID="e88f092a227be01624d30cf2bac69784b424a7e86cc6e00992ba2fff644ecb84" Nov 24 01:16:51 crc kubenswrapper[4755]: E1124 01:16:51.575640 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e88f092a227be01624d30cf2bac69784b424a7e86cc6e00992ba2fff644ecb84\": container 
with ID starting with e88f092a227be01624d30cf2bac69784b424a7e86cc6e00992ba2fff644ecb84 not found: ID does not exist" containerID="e88f092a227be01624d30cf2bac69784b424a7e86cc6e00992ba2fff644ecb84" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.575687 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e88f092a227be01624d30cf2bac69784b424a7e86cc6e00992ba2fff644ecb84"} err="failed to get container status \"e88f092a227be01624d30cf2bac69784b424a7e86cc6e00992ba2fff644ecb84\": rpc error: code = NotFound desc = could not find container \"e88f092a227be01624d30cf2bac69784b424a7e86cc6e00992ba2fff644ecb84\": container with ID starting with e88f092a227be01624d30cf2bac69784b424a7e86cc6e00992ba2fff644ecb84 not found: ID does not exist" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.575719 4755 scope.go:117] "RemoveContainer" containerID="478d6bcbe108b25feca4d8292c4388fe5d8a242bec251d456afe064e03ac406c" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.589054 4755 scope.go:117] "RemoveContainer" containerID="cf695e0bfad7e7d72b4b7cc0a820d85b5f387e04ded0d0a9ad2405f7c6d1c5b0" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.603374 4755 scope.go:117] "RemoveContainer" containerID="71404116f0aac7891b416dc2ef5d2d07449ed6aa353664c088d57fabdb7cb330" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.625309 4755 scope.go:117] "RemoveContainer" containerID="478d6bcbe108b25feca4d8292c4388fe5d8a242bec251d456afe064e03ac406c" Nov 24 01:16:51 crc kubenswrapper[4755]: E1124 01:16:51.625720 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"478d6bcbe108b25feca4d8292c4388fe5d8a242bec251d456afe064e03ac406c\": container with ID starting with 478d6bcbe108b25feca4d8292c4388fe5d8a242bec251d456afe064e03ac406c not found: ID does not exist" containerID="478d6bcbe108b25feca4d8292c4388fe5d8a242bec251d456afe064e03ac406c" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.625757 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"478d6bcbe108b25feca4d8292c4388fe5d8a242bec251d456afe064e03ac406c"} err="failed to get container status \"478d6bcbe108b25feca4d8292c4388fe5d8a242bec251d456afe064e03ac406c\": rpc error: code = NotFound desc = could not find container \"478d6bcbe108b25feca4d8292c4388fe5d8a242bec251d456afe064e03ac406c\": container with ID starting with 478d6bcbe108b25feca4d8292c4388fe5d8a242bec251d456afe064e03ac406c not found: ID does not exist" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.625783 4755 scope.go:117] "RemoveContainer" containerID="cf695e0bfad7e7d72b4b7cc0a820d85b5f387e04ded0d0a9ad2405f7c6d1c5b0" Nov 24 01:16:51 crc kubenswrapper[4755]: E1124 01:16:51.626083 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cf695e0bfad7e7d72b4b7cc0a820d85b5f387e04ded0d0a9ad2405f7c6d1c5b0\": container with ID starting with cf695e0bfad7e7d72b4b7cc0a820d85b5f387e04ded0d0a9ad2405f7c6d1c5b0 not found: ID does not exist" containerID="cf695e0bfad7e7d72b4b7cc0a820d85b5f387e04ded0d0a9ad2405f7c6d1c5b0" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.626109 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cf695e0bfad7e7d72b4b7cc0a820d85b5f387e04ded0d0a9ad2405f7c6d1c5b0"} err="failed to get container status \"cf695e0bfad7e7d72b4b7cc0a820d85b5f387e04ded0d0a9ad2405f7c6d1c5b0\": rpc error: 
code = NotFound desc = could not find container \"cf695e0bfad7e7d72b4b7cc0a820d85b5f387e04ded0d0a9ad2405f7c6d1c5b0\": container with ID starting with cf695e0bfad7e7d72b4b7cc0a820d85b5f387e04ded0d0a9ad2405f7c6d1c5b0 not found: ID does not exist" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.626126 4755 scope.go:117] "RemoveContainer" containerID="71404116f0aac7891b416dc2ef5d2d07449ed6aa353664c088d57fabdb7cb330" Nov 24 01:16:51 crc kubenswrapper[4755]: E1124 01:16:51.626404 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71404116f0aac7891b416dc2ef5d2d07449ed6aa353664c088d57fabdb7cb330\": container with ID starting with 71404116f0aac7891b416dc2ef5d2d07449ed6aa353664c088d57fabdb7cb330 not found: ID does not exist" containerID="71404116f0aac7891b416dc2ef5d2d07449ed6aa353664c088d57fabdb7cb330" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.626429 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71404116f0aac7891b416dc2ef5d2d07449ed6aa353664c088d57fabdb7cb330"} err="failed to get container status \"71404116f0aac7891b416dc2ef5d2d07449ed6aa353664c088d57fabdb7cb330\": rpc error: code = NotFound desc = could not find container \"71404116f0aac7891b416dc2ef5d2d07449ed6aa353664c088d57fabdb7cb330\": container with ID starting with 71404116f0aac7891b416dc2ef5d2d07449ed6aa353664c088d57fabdb7cb330 not found: ID does not exist" Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.690644 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rm9z9"] Nov 24 01:16:51 crc kubenswrapper[4755]: I1124 01:16:51.694449 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-rm9z9"] Nov 24 01:16:52 crc kubenswrapper[4755]: I1124 01:16:52.003217 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c115205-515a-4f53-96ec-8559f1744b9b" path="/var/lib/kubelet/pods/1c115205-515a-4f53-96ec-8559f1744b9b/volumes" Nov 24 01:16:52 crc kubenswrapper[4755]: I1124 01:16:52.003839 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="21501e82-61af-435e-97f5-767cb357cbfb" path="/var/lib/kubelet/pods/21501e82-61af-435e-97f5-767cb357cbfb/volumes" Nov 24 01:16:52 crc kubenswrapper[4755]: I1124 01:16:52.004414 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c537b75-83c8-4250-aae0-cacbdb94445f" path="/var/lib/kubelet/pods/7c537b75-83c8-4250-aae0-cacbdb94445f/volumes" Nov 24 01:16:52 crc kubenswrapper[4755]: I1124 01:16:52.004949 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9806f604-0abe-4de1-af45-78232744bc87" path="/var/lib/kubelet/pods/9806f604-0abe-4de1-af45-78232744bc87/volumes" Nov 24 01:16:52 crc kubenswrapper[4755]: I1124 01:16:52.005482 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b1028f34-b287-4775-a138-1ccdae47b7ee" path="/var/lib/kubelet/pods/b1028f34-b287-4775-a138-1ccdae47b7ee/volumes" Nov 24 01:16:52 crc kubenswrapper[4755]: I1124 01:16:52.394698 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-bs6fj" event={"ID":"be6e8d7e-1c19-449b-a7f5-c104a92edf7c","Type":"ContainerStarted","Data":"bce4c869e4840d74a30cc0a28bfb10ed64eca5a006ed78bb5c90b1d5a639d8bf"} Nov 24 01:16:52 crc kubenswrapper[4755]: I1124 01:16:52.394746 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/marketplace-operator-79b997595-bs6fj" event={"ID":"be6e8d7e-1c19-449b-a7f5-c104a92edf7c","Type":"ContainerStarted","Data":"7e6ae2c496a184fbf8ab44d8bd9a1079645da94b5ca0197bd0bb250b1e8c61b3"} Nov 24 01:16:52 crc kubenswrapper[4755]: I1124 01:16:52.394863 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-bs6fj" Nov 24 01:16:52 crc kubenswrapper[4755]: I1124 01:16:52.398851 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-bs6fj" Nov 24 01:16:52 crc kubenswrapper[4755]: I1124 01:16:52.410700 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-bs6fj" podStartSLOduration=2.410683036 podStartE2EDuration="2.410683036s" podCreationTimestamp="2025-11-24 01:16:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:16:52.409429576 +0000 UTC m=+237.095495077" watchObservedRunningTime="2025-11-24 01:16:52.410683036 +0000 UTC m=+237.096748537" Nov 24 01:16:52 crc kubenswrapper[4755]: I1124 01:16:52.648083 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-vjn27"] Nov 24 01:16:52 crc kubenswrapper[4755]: E1124 01:16:52.648806 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9806f604-0abe-4de1-af45-78232744bc87" containerName="extract-content" Nov 24 01:16:52 crc kubenswrapper[4755]: I1124 01:16:52.648826 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="9806f604-0abe-4de1-af45-78232744bc87" containerName="extract-content" Nov 24 01:16:52 crc kubenswrapper[4755]: E1124 01:16:52.648838 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21501e82-61af-435e-97f5-767cb357cbfb" containerName="registry-server" Nov 24 01:16:52 crc kubenswrapper[4755]: I1124 01:16:52.648844 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="21501e82-61af-435e-97f5-767cb357cbfb" containerName="registry-server" Nov 24 01:16:52 crc kubenswrapper[4755]: E1124 01:16:52.648851 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9806f604-0abe-4de1-af45-78232744bc87" containerName="registry-server" Nov 24 01:16:52 crc kubenswrapper[4755]: I1124 01:16:52.648856 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="9806f604-0abe-4de1-af45-78232744bc87" containerName="registry-server" Nov 24 01:16:52 crc kubenswrapper[4755]: E1124 01:16:52.648864 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21501e82-61af-435e-97f5-767cb357cbfb" containerName="extract-utilities" Nov 24 01:16:52 crc kubenswrapper[4755]: I1124 01:16:52.648870 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="21501e82-61af-435e-97f5-767cb357cbfb" containerName="extract-utilities" Nov 24 01:16:52 crc kubenswrapper[4755]: E1124 01:16:52.648877 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1028f34-b287-4775-a138-1ccdae47b7ee" containerName="registry-server" Nov 24 01:16:52 crc kubenswrapper[4755]: I1124 01:16:52.648882 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1028f34-b287-4775-a138-1ccdae47b7ee" containerName="registry-server" Nov 24 01:16:52 crc kubenswrapper[4755]: E1124 01:16:52.648889 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c115205-515a-4f53-96ec-8559f1744b9b" 
containerName="extract-content" Nov 24 01:16:52 crc kubenswrapper[4755]: I1124 01:16:52.648895 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c115205-515a-4f53-96ec-8559f1744b9b" containerName="extract-content" Nov 24 01:16:52 crc kubenswrapper[4755]: E1124 01:16:52.648905 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21501e82-61af-435e-97f5-767cb357cbfb" containerName="extract-content" Nov 24 01:16:52 crc kubenswrapper[4755]: I1124 01:16:52.648911 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="21501e82-61af-435e-97f5-767cb357cbfb" containerName="extract-content" Nov 24 01:16:52 crc kubenswrapper[4755]: E1124 01:16:52.648919 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1028f34-b287-4775-a138-1ccdae47b7ee" containerName="extract-utilities" Nov 24 01:16:52 crc kubenswrapper[4755]: I1124 01:16:52.648925 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1028f34-b287-4775-a138-1ccdae47b7ee" containerName="extract-utilities" Nov 24 01:16:52 crc kubenswrapper[4755]: E1124 01:16:52.648933 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c115205-515a-4f53-96ec-8559f1744b9b" containerName="registry-server" Nov 24 01:16:52 crc kubenswrapper[4755]: I1124 01:16:52.648939 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c115205-515a-4f53-96ec-8559f1744b9b" containerName="registry-server" Nov 24 01:16:52 crc kubenswrapper[4755]: E1124 01:16:52.648950 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c115205-515a-4f53-96ec-8559f1744b9b" containerName="extract-utilities" Nov 24 01:16:52 crc kubenswrapper[4755]: I1124 01:16:52.648955 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c115205-515a-4f53-96ec-8559f1744b9b" containerName="extract-utilities" Nov 24 01:16:52 crc kubenswrapper[4755]: E1124 01:16:52.648963 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c537b75-83c8-4250-aae0-cacbdb94445f" containerName="marketplace-operator" Nov 24 01:16:52 crc kubenswrapper[4755]: I1124 01:16:52.648969 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c537b75-83c8-4250-aae0-cacbdb94445f" containerName="marketplace-operator" Nov 24 01:16:52 crc kubenswrapper[4755]: E1124 01:16:52.648979 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9806f604-0abe-4de1-af45-78232744bc87" containerName="extract-utilities" Nov 24 01:16:52 crc kubenswrapper[4755]: I1124 01:16:52.648985 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="9806f604-0abe-4de1-af45-78232744bc87" containerName="extract-utilities" Nov 24 01:16:52 crc kubenswrapper[4755]: E1124 01:16:52.648992 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1028f34-b287-4775-a138-1ccdae47b7ee" containerName="extract-content" Nov 24 01:16:52 crc kubenswrapper[4755]: I1124 01:16:52.648998 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1028f34-b287-4775-a138-1ccdae47b7ee" containerName="extract-content" Nov 24 01:16:52 crc kubenswrapper[4755]: I1124 01:16:52.649079 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1028f34-b287-4775-a138-1ccdae47b7ee" containerName="registry-server" Nov 24 01:16:52 crc kubenswrapper[4755]: I1124 01:16:52.649087 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="21501e82-61af-435e-97f5-767cb357cbfb" containerName="registry-server" Nov 24 01:16:52 crc kubenswrapper[4755]: I1124 01:16:52.649097 4755 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="7c537b75-83c8-4250-aae0-cacbdb94445f" containerName="marketplace-operator" Nov 24 01:16:52 crc kubenswrapper[4755]: I1124 01:16:52.649106 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c115205-515a-4f53-96ec-8559f1744b9b" containerName="registry-server" Nov 24 01:16:52 crc kubenswrapper[4755]: I1124 01:16:52.649114 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="9806f604-0abe-4de1-af45-78232744bc87" containerName="registry-server" Nov 24 01:16:52 crc kubenswrapper[4755]: I1124 01:16:52.649747 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vjn27" Nov 24 01:16:52 crc kubenswrapper[4755]: I1124 01:16:52.651355 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Nov 24 01:16:52 crc kubenswrapper[4755]: I1124 01:16:52.658496 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vjn27"] Nov 24 01:16:52 crc kubenswrapper[4755]: I1124 01:16:52.841868 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/098cf4e9-8a23-42d7-ae62-497aa11abcca-catalog-content\") pod \"redhat-marketplace-vjn27\" (UID: \"098cf4e9-8a23-42d7-ae62-497aa11abcca\") " pod="openshift-marketplace/redhat-marketplace-vjn27" Nov 24 01:16:52 crc kubenswrapper[4755]: I1124 01:16:52.841937 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/098cf4e9-8a23-42d7-ae62-497aa11abcca-utilities\") pod \"redhat-marketplace-vjn27\" (UID: \"098cf4e9-8a23-42d7-ae62-497aa11abcca\") " pod="openshift-marketplace/redhat-marketplace-vjn27" Nov 24 01:16:52 crc kubenswrapper[4755]: I1124 01:16:52.841976 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pmf8n\" (UniqueName: \"kubernetes.io/projected/098cf4e9-8a23-42d7-ae62-497aa11abcca-kube-api-access-pmf8n\") pod \"redhat-marketplace-vjn27\" (UID: \"098cf4e9-8a23-42d7-ae62-497aa11abcca\") " pod="openshift-marketplace/redhat-marketplace-vjn27" Nov 24 01:16:52 crc kubenswrapper[4755]: I1124 01:16:52.942992 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/098cf4e9-8a23-42d7-ae62-497aa11abcca-catalog-content\") pod \"redhat-marketplace-vjn27\" (UID: \"098cf4e9-8a23-42d7-ae62-497aa11abcca\") " pod="openshift-marketplace/redhat-marketplace-vjn27" Nov 24 01:16:52 crc kubenswrapper[4755]: I1124 01:16:52.943062 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/098cf4e9-8a23-42d7-ae62-497aa11abcca-utilities\") pod \"redhat-marketplace-vjn27\" (UID: \"098cf4e9-8a23-42d7-ae62-497aa11abcca\") " pod="openshift-marketplace/redhat-marketplace-vjn27" Nov 24 01:16:52 crc kubenswrapper[4755]: I1124 01:16:52.943098 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pmf8n\" (UniqueName: \"kubernetes.io/projected/098cf4e9-8a23-42d7-ae62-497aa11abcca-kube-api-access-pmf8n\") pod \"redhat-marketplace-vjn27\" (UID: \"098cf4e9-8a23-42d7-ae62-497aa11abcca\") " pod="openshift-marketplace/redhat-marketplace-vjn27" Nov 24 01:16:52 crc kubenswrapper[4755]: I1124 01:16:52.943529 4755 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/098cf4e9-8a23-42d7-ae62-497aa11abcca-catalog-content\") pod \"redhat-marketplace-vjn27\" (UID: \"098cf4e9-8a23-42d7-ae62-497aa11abcca\") " pod="openshift-marketplace/redhat-marketplace-vjn27" Nov 24 01:16:52 crc kubenswrapper[4755]: I1124 01:16:52.943650 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/098cf4e9-8a23-42d7-ae62-497aa11abcca-utilities\") pod \"redhat-marketplace-vjn27\" (UID: \"098cf4e9-8a23-42d7-ae62-497aa11abcca\") " pod="openshift-marketplace/redhat-marketplace-vjn27" Nov 24 01:16:52 crc kubenswrapper[4755]: I1124 01:16:52.961421 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pmf8n\" (UniqueName: \"kubernetes.io/projected/098cf4e9-8a23-42d7-ae62-497aa11abcca-kube-api-access-pmf8n\") pod \"redhat-marketplace-vjn27\" (UID: \"098cf4e9-8a23-42d7-ae62-497aa11abcca\") " pod="openshift-marketplace/redhat-marketplace-vjn27" Nov 24 01:16:52 crc kubenswrapper[4755]: I1124 01:16:52.971876 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vjn27" Nov 24 01:16:53 crc kubenswrapper[4755]: I1124 01:16:53.250151 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-8mk5m"] Nov 24 01:16:53 crc kubenswrapper[4755]: I1124 01:16:53.251657 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8mk5m" Nov 24 01:16:53 crc kubenswrapper[4755]: I1124 01:16:53.253397 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Nov 24 01:16:53 crc kubenswrapper[4755]: I1124 01:16:53.258165 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8mk5m"] Nov 24 01:16:53 crc kubenswrapper[4755]: I1124 01:16:53.339748 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vjn27"] Nov 24 01:16:53 crc kubenswrapper[4755]: W1124 01:16:53.348684 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod098cf4e9_8a23_42d7_ae62_497aa11abcca.slice/crio-dae11457716c4f8c309c3ecdf5bf05b4b49a300af8dbdc3ed3ef424d4ad9d0fc WatchSource:0}: Error finding container dae11457716c4f8c309c3ecdf5bf05b4b49a300af8dbdc3ed3ef424d4ad9d0fc: Status 404 returned error can't find the container with id dae11457716c4f8c309c3ecdf5bf05b4b49a300af8dbdc3ed3ef424d4ad9d0fc Nov 24 01:16:53 crc kubenswrapper[4755]: I1124 01:16:53.404292 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vjn27" event={"ID":"098cf4e9-8a23-42d7-ae62-497aa11abcca","Type":"ContainerStarted","Data":"dae11457716c4f8c309c3ecdf5bf05b4b49a300af8dbdc3ed3ef424d4ad9d0fc"} Nov 24 01:16:53 crc kubenswrapper[4755]: I1124 01:16:53.448540 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xrfnm\" (UniqueName: \"kubernetes.io/projected/fb5622d8-2858-48d9-94e9-5a4ea557c6ae-kube-api-access-xrfnm\") pod \"certified-operators-8mk5m\" (UID: \"fb5622d8-2858-48d9-94e9-5a4ea557c6ae\") " pod="openshift-marketplace/certified-operators-8mk5m" Nov 24 01:16:53 crc kubenswrapper[4755]: I1124 01:16:53.448687 4755 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb5622d8-2858-48d9-94e9-5a4ea557c6ae-utilities\") pod \"certified-operators-8mk5m\" (UID: \"fb5622d8-2858-48d9-94e9-5a4ea557c6ae\") " pod="openshift-marketplace/certified-operators-8mk5m" Nov 24 01:16:53 crc kubenswrapper[4755]: I1124 01:16:53.448765 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb5622d8-2858-48d9-94e9-5a4ea557c6ae-catalog-content\") pod \"certified-operators-8mk5m\" (UID: \"fb5622d8-2858-48d9-94e9-5a4ea557c6ae\") " pod="openshift-marketplace/certified-operators-8mk5m" Nov 24 01:16:53 crc kubenswrapper[4755]: I1124 01:16:53.549492 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb5622d8-2858-48d9-94e9-5a4ea557c6ae-utilities\") pod \"certified-operators-8mk5m\" (UID: \"fb5622d8-2858-48d9-94e9-5a4ea557c6ae\") " pod="openshift-marketplace/certified-operators-8mk5m" Nov 24 01:16:53 crc kubenswrapper[4755]: I1124 01:16:53.549548 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb5622d8-2858-48d9-94e9-5a4ea557c6ae-catalog-content\") pod \"certified-operators-8mk5m\" (UID: \"fb5622d8-2858-48d9-94e9-5a4ea557c6ae\") " pod="openshift-marketplace/certified-operators-8mk5m" Nov 24 01:16:53 crc kubenswrapper[4755]: I1124 01:16:53.549659 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xrfnm\" (UniqueName: \"kubernetes.io/projected/fb5622d8-2858-48d9-94e9-5a4ea557c6ae-kube-api-access-xrfnm\") pod \"certified-operators-8mk5m\" (UID: \"fb5622d8-2858-48d9-94e9-5a4ea557c6ae\") " pod="openshift-marketplace/certified-operators-8mk5m" Nov 24 01:16:53 crc kubenswrapper[4755]: I1124 01:16:53.550459 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb5622d8-2858-48d9-94e9-5a4ea557c6ae-catalog-content\") pod \"certified-operators-8mk5m\" (UID: \"fb5622d8-2858-48d9-94e9-5a4ea557c6ae\") " pod="openshift-marketplace/certified-operators-8mk5m" Nov 24 01:16:53 crc kubenswrapper[4755]: I1124 01:16:53.551665 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb5622d8-2858-48d9-94e9-5a4ea557c6ae-utilities\") pod \"certified-operators-8mk5m\" (UID: \"fb5622d8-2858-48d9-94e9-5a4ea557c6ae\") " pod="openshift-marketplace/certified-operators-8mk5m" Nov 24 01:16:53 crc kubenswrapper[4755]: I1124 01:16:53.566301 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xrfnm\" (UniqueName: \"kubernetes.io/projected/fb5622d8-2858-48d9-94e9-5a4ea557c6ae-kube-api-access-xrfnm\") pod \"certified-operators-8mk5m\" (UID: \"fb5622d8-2858-48d9-94e9-5a4ea557c6ae\") " pod="openshift-marketplace/certified-operators-8mk5m" Nov 24 01:16:53 crc kubenswrapper[4755]: I1124 01:16:53.578035 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8mk5m" Nov 24 01:16:53 crc kubenswrapper[4755]: I1124 01:16:53.949522 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8mk5m"] Nov 24 01:16:54 crc kubenswrapper[4755]: I1124 01:16:54.412262 4755 generic.go:334] "Generic (PLEG): container finished" podID="098cf4e9-8a23-42d7-ae62-497aa11abcca" containerID="3efb86e880e10558cab2e9fb5ddfa266dafc2bcc864bdbfeed796128785bdea6" exitCode=0 Nov 24 01:16:54 crc kubenswrapper[4755]: I1124 01:16:54.412477 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vjn27" event={"ID":"098cf4e9-8a23-42d7-ae62-497aa11abcca","Type":"ContainerDied","Data":"3efb86e880e10558cab2e9fb5ddfa266dafc2bcc864bdbfeed796128785bdea6"} Nov 24 01:16:54 crc kubenswrapper[4755]: I1124 01:16:54.414035 4755 generic.go:334] "Generic (PLEG): container finished" podID="fb5622d8-2858-48d9-94e9-5a4ea557c6ae" containerID="e150108e21682264cd14219eda377983fbfe2dcbf625526805c7266fcec9b121" exitCode=0 Nov 24 01:16:54 crc kubenswrapper[4755]: I1124 01:16:54.414119 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8mk5m" event={"ID":"fb5622d8-2858-48d9-94e9-5a4ea557c6ae","Type":"ContainerDied","Data":"e150108e21682264cd14219eda377983fbfe2dcbf625526805c7266fcec9b121"} Nov 24 01:16:54 crc kubenswrapper[4755]: I1124 01:16:54.414144 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8mk5m" event={"ID":"fb5622d8-2858-48d9-94e9-5a4ea557c6ae","Type":"ContainerStarted","Data":"9a2ad2240b49c96b71ea81572cfe3d1a552ea7c5ebeecc341ad39f3c3ff0057c"} Nov 24 01:16:55 crc kubenswrapper[4755]: I1124 01:16:55.048210 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-cgvb9"] Nov 24 01:16:55 crc kubenswrapper[4755]: I1124 01:16:55.049292 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-cgvb9" Nov 24 01:16:55 crc kubenswrapper[4755]: I1124 01:16:55.051262 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Nov 24 01:16:55 crc kubenswrapper[4755]: I1124 01:16:55.059292 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cgvb9"] Nov 24 01:16:55 crc kubenswrapper[4755]: I1124 01:16:55.168576 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/49c6f838-5dc3-4129-ad76-15b58019b9cc-catalog-content\") pod \"redhat-operators-cgvb9\" (UID: \"49c6f838-5dc3-4129-ad76-15b58019b9cc\") " pod="openshift-marketplace/redhat-operators-cgvb9" Nov 24 01:16:55 crc kubenswrapper[4755]: I1124 01:16:55.168648 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r66c6\" (UniqueName: \"kubernetes.io/projected/49c6f838-5dc3-4129-ad76-15b58019b9cc-kube-api-access-r66c6\") pod \"redhat-operators-cgvb9\" (UID: \"49c6f838-5dc3-4129-ad76-15b58019b9cc\") " pod="openshift-marketplace/redhat-operators-cgvb9" Nov 24 01:16:55 crc kubenswrapper[4755]: I1124 01:16:55.168687 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/49c6f838-5dc3-4129-ad76-15b58019b9cc-utilities\") pod \"redhat-operators-cgvb9\" (UID: \"49c6f838-5dc3-4129-ad76-15b58019b9cc\") " pod="openshift-marketplace/redhat-operators-cgvb9" Nov 24 01:16:55 crc kubenswrapper[4755]: I1124 01:16:55.269768 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/49c6f838-5dc3-4129-ad76-15b58019b9cc-catalog-content\") pod \"redhat-operators-cgvb9\" (UID: \"49c6f838-5dc3-4129-ad76-15b58019b9cc\") " pod="openshift-marketplace/redhat-operators-cgvb9" Nov 24 01:16:55 crc kubenswrapper[4755]: I1124 01:16:55.270175 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r66c6\" (UniqueName: \"kubernetes.io/projected/49c6f838-5dc3-4129-ad76-15b58019b9cc-kube-api-access-r66c6\") pod \"redhat-operators-cgvb9\" (UID: \"49c6f838-5dc3-4129-ad76-15b58019b9cc\") " pod="openshift-marketplace/redhat-operators-cgvb9" Nov 24 01:16:55 crc kubenswrapper[4755]: I1124 01:16:55.270212 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/49c6f838-5dc3-4129-ad76-15b58019b9cc-utilities\") pod \"redhat-operators-cgvb9\" (UID: \"49c6f838-5dc3-4129-ad76-15b58019b9cc\") " pod="openshift-marketplace/redhat-operators-cgvb9" Nov 24 01:16:55 crc kubenswrapper[4755]: I1124 01:16:55.270330 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/49c6f838-5dc3-4129-ad76-15b58019b9cc-catalog-content\") pod \"redhat-operators-cgvb9\" (UID: \"49c6f838-5dc3-4129-ad76-15b58019b9cc\") " pod="openshift-marketplace/redhat-operators-cgvb9" Nov 24 01:16:55 crc kubenswrapper[4755]: I1124 01:16:55.270791 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/49c6f838-5dc3-4129-ad76-15b58019b9cc-utilities\") pod \"redhat-operators-cgvb9\" (UID: \"49c6f838-5dc3-4129-ad76-15b58019b9cc\") " 
pod="openshift-marketplace/redhat-operators-cgvb9" Nov 24 01:16:55 crc kubenswrapper[4755]: I1124 01:16:55.290083 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r66c6\" (UniqueName: \"kubernetes.io/projected/49c6f838-5dc3-4129-ad76-15b58019b9cc-kube-api-access-r66c6\") pod \"redhat-operators-cgvb9\" (UID: \"49c6f838-5dc3-4129-ad76-15b58019b9cc\") " pod="openshift-marketplace/redhat-operators-cgvb9" Nov 24 01:16:55 crc kubenswrapper[4755]: I1124 01:16:55.395787 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cgvb9" Nov 24 01:16:55 crc kubenswrapper[4755]: I1124 01:16:55.421530 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8mk5m" event={"ID":"fb5622d8-2858-48d9-94e9-5a4ea557c6ae","Type":"ContainerStarted","Data":"b5098e79c56dca9e40a8e5eb99c74076a318dff79b4d3b727a5d81f6b984a7c3"} Nov 24 01:16:55 crc kubenswrapper[4755]: I1124 01:16:55.423849 4755 generic.go:334] "Generic (PLEG): container finished" podID="098cf4e9-8a23-42d7-ae62-497aa11abcca" containerID="9c2ffbd468cfeb40f7beb743ca11b5c985316aad9ab6818f8a6c05ad39a1fe1a" exitCode=0 Nov 24 01:16:55 crc kubenswrapper[4755]: I1124 01:16:55.423900 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vjn27" event={"ID":"098cf4e9-8a23-42d7-ae62-497aa11abcca","Type":"ContainerDied","Data":"9c2ffbd468cfeb40f7beb743ca11b5c985316aad9ab6818f8a6c05ad39a1fe1a"} Nov 24 01:16:55 crc kubenswrapper[4755]: I1124 01:16:55.587683 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cgvb9"] Nov 24 01:16:55 crc kubenswrapper[4755]: W1124 01:16:55.593260 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod49c6f838_5dc3_4129_ad76_15b58019b9cc.slice/crio-7db66b8709aecd01595fb7f988d55f266ad7231cd0b338207f25a045a07c4c05 WatchSource:0}: Error finding container 7db66b8709aecd01595fb7f988d55f266ad7231cd0b338207f25a045a07c4c05: Status 404 returned error can't find the container with id 7db66b8709aecd01595fb7f988d55f266ad7231cd0b338207f25a045a07c4c05 Nov 24 01:16:55 crc kubenswrapper[4755]: I1124 01:16:55.659986 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-4ms9l"] Nov 24 01:16:55 crc kubenswrapper[4755]: I1124 01:16:55.661944 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4ms9l" Nov 24 01:16:55 crc kubenswrapper[4755]: I1124 01:16:55.671041 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Nov 24 01:16:55 crc kubenswrapper[4755]: I1124 01:16:55.671560 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4ms9l"] Nov 24 01:16:55 crc kubenswrapper[4755]: I1124 01:16:55.780557 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/802e4447-64d9-4370-954b-7212c1ef7a9d-utilities\") pod \"community-operators-4ms9l\" (UID: \"802e4447-64d9-4370-954b-7212c1ef7a9d\") " pod="openshift-marketplace/community-operators-4ms9l" Nov 24 01:16:55 crc kubenswrapper[4755]: I1124 01:16:55.781371 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/802e4447-64d9-4370-954b-7212c1ef7a9d-catalog-content\") pod \"community-operators-4ms9l\" (UID: \"802e4447-64d9-4370-954b-7212c1ef7a9d\") " pod="openshift-marketplace/community-operators-4ms9l" Nov 24 01:16:55 crc kubenswrapper[4755]: I1124 01:16:55.781461 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gw7tz\" (UniqueName: \"kubernetes.io/projected/802e4447-64d9-4370-954b-7212c1ef7a9d-kube-api-access-gw7tz\") pod \"community-operators-4ms9l\" (UID: \"802e4447-64d9-4370-954b-7212c1ef7a9d\") " pod="openshift-marketplace/community-operators-4ms9l" Nov 24 01:16:55 crc kubenswrapper[4755]: I1124 01:16:55.882246 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/802e4447-64d9-4370-954b-7212c1ef7a9d-utilities\") pod \"community-operators-4ms9l\" (UID: \"802e4447-64d9-4370-954b-7212c1ef7a9d\") " pod="openshift-marketplace/community-operators-4ms9l" Nov 24 01:16:55 crc kubenswrapper[4755]: I1124 01:16:55.882580 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/802e4447-64d9-4370-954b-7212c1ef7a9d-catalog-content\") pod \"community-operators-4ms9l\" (UID: \"802e4447-64d9-4370-954b-7212c1ef7a9d\") " pod="openshift-marketplace/community-operators-4ms9l" Nov 24 01:16:55 crc kubenswrapper[4755]: I1124 01:16:55.882718 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gw7tz\" (UniqueName: \"kubernetes.io/projected/802e4447-64d9-4370-954b-7212c1ef7a9d-kube-api-access-gw7tz\") pod \"community-operators-4ms9l\" (UID: \"802e4447-64d9-4370-954b-7212c1ef7a9d\") " pod="openshift-marketplace/community-operators-4ms9l" Nov 24 01:16:55 crc kubenswrapper[4755]: I1124 01:16:55.883019 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/802e4447-64d9-4370-954b-7212c1ef7a9d-utilities\") pod \"community-operators-4ms9l\" (UID: \"802e4447-64d9-4370-954b-7212c1ef7a9d\") " pod="openshift-marketplace/community-operators-4ms9l" Nov 24 01:16:55 crc kubenswrapper[4755]: I1124 01:16:55.883338 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/802e4447-64d9-4370-954b-7212c1ef7a9d-catalog-content\") pod \"community-operators-4ms9l\" (UID: 
\"802e4447-64d9-4370-954b-7212c1ef7a9d\") " pod="openshift-marketplace/community-operators-4ms9l" Nov 24 01:16:55 crc kubenswrapper[4755]: I1124 01:16:55.906494 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gw7tz\" (UniqueName: \"kubernetes.io/projected/802e4447-64d9-4370-954b-7212c1ef7a9d-kube-api-access-gw7tz\") pod \"community-operators-4ms9l\" (UID: \"802e4447-64d9-4370-954b-7212c1ef7a9d\") " pod="openshift-marketplace/community-operators-4ms9l" Nov 24 01:16:55 crc kubenswrapper[4755]: I1124 01:16:55.995544 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Nov 24 01:16:56 crc kubenswrapper[4755]: I1124 01:16:56.004188 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4ms9l" Nov 24 01:16:56 crc kubenswrapper[4755]: I1124 01:16:56.219401 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4ms9l"] Nov 24 01:16:56 crc kubenswrapper[4755]: I1124 01:16:56.437198 4755 generic.go:334] "Generic (PLEG): container finished" podID="49c6f838-5dc3-4129-ad76-15b58019b9cc" containerID="411adf12eff4ee04fb646a9367da10e88c8e4fb0697783917dd1495ef80cf725" exitCode=0 Nov 24 01:16:56 crc kubenswrapper[4755]: I1124 01:16:56.437391 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cgvb9" event={"ID":"49c6f838-5dc3-4129-ad76-15b58019b9cc","Type":"ContainerDied","Data":"411adf12eff4ee04fb646a9367da10e88c8e4fb0697783917dd1495ef80cf725"} Nov 24 01:16:56 crc kubenswrapper[4755]: I1124 01:16:56.437658 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cgvb9" event={"ID":"49c6f838-5dc3-4129-ad76-15b58019b9cc","Type":"ContainerStarted","Data":"7db66b8709aecd01595fb7f988d55f266ad7231cd0b338207f25a045a07c4c05"} Nov 24 01:16:56 crc kubenswrapper[4755]: I1124 01:16:56.446704 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vjn27" event={"ID":"098cf4e9-8a23-42d7-ae62-497aa11abcca","Type":"ContainerStarted","Data":"ed767233b3f21ad6a0e76f3b520c5e34cb905650d2e068286e144050cb254397"} Nov 24 01:16:56 crc kubenswrapper[4755]: I1124 01:16:56.449160 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4ms9l" event={"ID":"802e4447-64d9-4370-954b-7212c1ef7a9d","Type":"ContainerStarted","Data":"6d10a5d46d674b17d276de984bbd69e14f70042ab07a81526e86a462537c7681"} Nov 24 01:16:56 crc kubenswrapper[4755]: I1124 01:16:56.449196 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4ms9l" event={"ID":"802e4447-64d9-4370-954b-7212c1ef7a9d","Type":"ContainerStarted","Data":"0c1a1a54fda82f52075c776c6c2356fff1d5f086fa1647ba5845a6a1bd9729f3"} Nov 24 01:16:56 crc kubenswrapper[4755]: I1124 01:16:56.453513 4755 generic.go:334] "Generic (PLEG): container finished" podID="fb5622d8-2858-48d9-94e9-5a4ea557c6ae" containerID="b5098e79c56dca9e40a8e5eb99c74076a318dff79b4d3b727a5d81f6b984a7c3" exitCode=0 Nov 24 01:16:56 crc kubenswrapper[4755]: I1124 01:16:56.453552 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8mk5m" event={"ID":"fb5622d8-2858-48d9-94e9-5a4ea557c6ae","Type":"ContainerDied","Data":"b5098e79c56dca9e40a8e5eb99c74076a318dff79b4d3b727a5d81f6b984a7c3"} Nov 24 01:16:56 crc kubenswrapper[4755]: I1124 
01:16:56.484545 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-vjn27" podStartSLOduration=2.993531328 podStartE2EDuration="4.48451088s" podCreationTimestamp="2025-11-24 01:16:52 +0000 UTC" firstStartedPulling="2025-11-24 01:16:54.422327952 +0000 UTC m=+239.108393453" lastFinishedPulling="2025-11-24 01:16:55.913307504 +0000 UTC m=+240.599373005" observedRunningTime="2025-11-24 01:16:56.482796335 +0000 UTC m=+241.168861846" watchObservedRunningTime="2025-11-24 01:16:56.48451088 +0000 UTC m=+241.170576381" Nov 24 01:16:57 crc kubenswrapper[4755]: I1124 01:16:57.462200 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8mk5m" event={"ID":"fb5622d8-2858-48d9-94e9-5a4ea557c6ae","Type":"ContainerStarted","Data":"adb903777706a96e2a09713a6ca2f7d40d54aa5d7cf2c0eedd1d5e40ba05ac15"} Nov 24 01:16:57 crc kubenswrapper[4755]: I1124 01:16:57.464302 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cgvb9" event={"ID":"49c6f838-5dc3-4129-ad76-15b58019b9cc","Type":"ContainerStarted","Data":"a374324fafb2329b299f5989ae5ea5791d61a5c2f9b70ff3b9c56296b7dffd8d"} Nov 24 01:16:57 crc kubenswrapper[4755]: I1124 01:16:57.465529 4755 generic.go:334] "Generic (PLEG): container finished" podID="802e4447-64d9-4370-954b-7212c1ef7a9d" containerID="6d10a5d46d674b17d276de984bbd69e14f70042ab07a81526e86a462537c7681" exitCode=0 Nov 24 01:16:57 crc kubenswrapper[4755]: I1124 01:16:57.465593 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4ms9l" event={"ID":"802e4447-64d9-4370-954b-7212c1ef7a9d","Type":"ContainerDied","Data":"6d10a5d46d674b17d276de984bbd69e14f70042ab07a81526e86a462537c7681"} Nov 24 01:16:57 crc kubenswrapper[4755]: I1124 01:16:57.489739 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-8mk5m" podStartSLOduration=1.9160380479999999 podStartE2EDuration="4.489715242s" podCreationTimestamp="2025-11-24 01:16:53 +0000 UTC" firstStartedPulling="2025-11-24 01:16:54.41583018 +0000 UTC m=+239.101895681" lastFinishedPulling="2025-11-24 01:16:56.989507374 +0000 UTC m=+241.675572875" observedRunningTime="2025-11-24 01:16:57.484810393 +0000 UTC m=+242.170875894" watchObservedRunningTime="2025-11-24 01:16:57.489715242 +0000 UTC m=+242.175780743" Nov 24 01:16:58 crc kubenswrapper[4755]: I1124 01:16:58.474336 4755 generic.go:334] "Generic (PLEG): container finished" podID="802e4447-64d9-4370-954b-7212c1ef7a9d" containerID="ac801731db43af83723975e5e1917cfc187c8bbada46938b6cf40d8ce93e2f0a" exitCode=0 Nov 24 01:16:58 crc kubenswrapper[4755]: I1124 01:16:58.474417 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4ms9l" event={"ID":"802e4447-64d9-4370-954b-7212c1ef7a9d","Type":"ContainerDied","Data":"ac801731db43af83723975e5e1917cfc187c8bbada46938b6cf40d8ce93e2f0a"} Nov 24 01:16:58 crc kubenswrapper[4755]: I1124 01:16:58.476392 4755 generic.go:334] "Generic (PLEG): container finished" podID="49c6f838-5dc3-4129-ad76-15b58019b9cc" containerID="a374324fafb2329b299f5989ae5ea5791d61a5c2f9b70ff3b9c56296b7dffd8d" exitCode=0 Nov 24 01:16:58 crc kubenswrapper[4755]: I1124 01:16:58.476912 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cgvb9" 
event={"ID":"49c6f838-5dc3-4129-ad76-15b58019b9cc","Type":"ContainerDied","Data":"a374324fafb2329b299f5989ae5ea5791d61a5c2f9b70ff3b9c56296b7dffd8d"} Nov 24 01:17:00 crc kubenswrapper[4755]: I1124 01:17:00.530859 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cgvb9" event={"ID":"49c6f838-5dc3-4129-ad76-15b58019b9cc","Type":"ContainerStarted","Data":"3591c69c8c21e371083b4e14073a9d9908b720fe82b1857907327fd2eec1b6ad"} Nov 24 01:17:00 crc kubenswrapper[4755]: I1124 01:17:00.533184 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4ms9l" event={"ID":"802e4447-64d9-4370-954b-7212c1ef7a9d","Type":"ContainerStarted","Data":"6a4f9d74d8b1a4e09a55fa839dac3135e8a1d29b97ce2e859450064860725c50"} Nov 24 01:17:00 crc kubenswrapper[4755]: I1124 01:17:00.550227 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-cgvb9" podStartSLOduration=3.093818278 podStartE2EDuration="5.550210899s" podCreationTimestamp="2025-11-24 01:16:55 +0000 UTC" firstStartedPulling="2025-11-24 01:16:56.439164447 +0000 UTC m=+241.125229958" lastFinishedPulling="2025-11-24 01:16:58.895557078 +0000 UTC m=+243.581622579" observedRunningTime="2025-11-24 01:17:00.548643028 +0000 UTC m=+245.234708539" watchObservedRunningTime="2025-11-24 01:17:00.550210899 +0000 UTC m=+245.236276400" Nov 24 01:17:00 crc kubenswrapper[4755]: I1124 01:17:00.569720 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-4ms9l" podStartSLOduration=4.160737056 podStartE2EDuration="5.569703633s" podCreationTimestamp="2025-11-24 01:16:55 +0000 UTC" firstStartedPulling="2025-11-24 01:16:57.466732425 +0000 UTC m=+242.152797926" lastFinishedPulling="2025-11-24 01:16:58.875699002 +0000 UTC m=+243.561764503" observedRunningTime="2025-11-24 01:17:00.569394333 +0000 UTC m=+245.255459854" watchObservedRunningTime="2025-11-24 01:17:00.569703633 +0000 UTC m=+245.255769134" Nov 24 01:17:02 crc kubenswrapper[4755]: I1124 01:17:02.972368 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-vjn27" Nov 24 01:17:02 crc kubenswrapper[4755]: I1124 01:17:02.972419 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-vjn27" Nov 24 01:17:03 crc kubenswrapper[4755]: I1124 01:17:03.017878 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-vjn27" Nov 24 01:17:03 crc kubenswrapper[4755]: I1124 01:17:03.579032 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-8mk5m" Nov 24 01:17:03 crc kubenswrapper[4755]: I1124 01:17:03.579288 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-8mk5m" Nov 24 01:17:03 crc kubenswrapper[4755]: I1124 01:17:03.582642 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-vjn27" Nov 24 01:17:03 crc kubenswrapper[4755]: I1124 01:17:03.640315 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-8mk5m" Nov 24 01:17:04 crc kubenswrapper[4755]: I1124 01:17:04.596392 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/certified-operators-8mk5m" Nov 24 01:17:05 crc kubenswrapper[4755]: I1124 01:17:05.396404 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-cgvb9" Nov 24 01:17:05 crc kubenswrapper[4755]: I1124 01:17:05.396813 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-cgvb9" Nov 24 01:17:05 crc kubenswrapper[4755]: I1124 01:17:05.437370 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-cgvb9" Nov 24 01:17:05 crc kubenswrapper[4755]: I1124 01:17:05.608657 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-cgvb9" Nov 24 01:17:06 crc kubenswrapper[4755]: I1124 01:17:06.005180 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-4ms9l" Nov 24 01:17:06 crc kubenswrapper[4755]: I1124 01:17:06.005217 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-4ms9l" Nov 24 01:17:06 crc kubenswrapper[4755]: I1124 01:17:06.057193 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-4ms9l" Nov 24 01:17:06 crc kubenswrapper[4755]: I1124 01:17:06.597502 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-4ms9l" Nov 24 01:18:33 crc kubenswrapper[4755]: I1124 01:18:33.294846 4755 patch_prober.go:28] interesting pod/machine-config-daemon-h8xzm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 01:18:33 crc kubenswrapper[4755]: I1124 01:18:33.295384 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 01:19:03 crc kubenswrapper[4755]: I1124 01:19:03.294982 4755 patch_prober.go:28] interesting pod/machine-config-daemon-h8xzm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 01:19:03 crc kubenswrapper[4755]: I1124 01:19:03.296539 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 01:19:33 crc kubenswrapper[4755]: I1124 01:19:33.295091 4755 patch_prober.go:28] interesting pod/machine-config-daemon-h8xzm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 01:19:33 crc kubenswrapper[4755]: I1124 01:19:33.295663 4755 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 01:19:33 crc kubenswrapper[4755]: I1124 01:19:33.295709 4755 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" Nov 24 01:19:33 crc kubenswrapper[4755]: I1124 01:19:33.296313 4755 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"40fb596ee2efd1749e3b689faff00f59f67b4ea23102fca97de18ffa0a4dd608"} pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 24 01:19:33 crc kubenswrapper[4755]: I1124 01:19:33.296361 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" containerID="cri-o://40fb596ee2efd1749e3b689faff00f59f67b4ea23102fca97de18ffa0a4dd608" gracePeriod=600 Nov 24 01:19:34 crc kubenswrapper[4755]: I1124 01:19:34.242665 4755 generic.go:334] "Generic (PLEG): container finished" podID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerID="40fb596ee2efd1749e3b689faff00f59f67b4ea23102fca97de18ffa0a4dd608" exitCode=0 Nov 24 01:19:34 crc kubenswrapper[4755]: I1124 01:19:34.242792 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" event={"ID":"b1962128-02a0-46c3-82c2-5055c2aed0b9","Type":"ContainerDied","Data":"40fb596ee2efd1749e3b689faff00f59f67b4ea23102fca97de18ffa0a4dd608"} Nov 24 01:19:34 crc kubenswrapper[4755]: I1124 01:19:34.243000 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" event={"ID":"b1962128-02a0-46c3-82c2-5055c2aed0b9","Type":"ContainerStarted","Data":"587d82f2c33616f73d21402a931fc68cfeb5ed9c5e1ee08ba40d1b70c50f1cdd"} Nov 24 01:19:34 crc kubenswrapper[4755]: I1124 01:19:34.243020 4755 scope.go:117] "RemoveContainer" containerID="16a2280b76aa63e33f859193da8353df1f8a4014ef51c0ef6350beb5fb217245" Nov 24 01:20:48 crc kubenswrapper[4755]: I1124 01:20:48.877625 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-r5kgs"] Nov 24 01:20:48 crc kubenswrapper[4755]: I1124 01:20:48.879335 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-r5kgs" Nov 24 01:20:48 crc kubenswrapper[4755]: I1124 01:20:48.903026 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-r5kgs"] Nov 24 01:20:48 crc kubenswrapper[4755]: I1124 01:20:48.981967 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b603a066-eef2-4527-a1d0-cf64870c7298-bound-sa-token\") pod \"image-registry-66df7c8f76-r5kgs\" (UID: \"b603a066-eef2-4527-a1d0-cf64870c7298\") " pod="openshift-image-registry/image-registry-66df7c8f76-r5kgs" Nov 24 01:20:48 crc kubenswrapper[4755]: I1124 01:20:48.982029 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-r5kgs\" (UID: \"b603a066-eef2-4527-a1d0-cf64870c7298\") " pod="openshift-image-registry/image-registry-66df7c8f76-r5kgs" Nov 24 01:20:48 crc kubenswrapper[4755]: I1124 01:20:48.982054 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b603a066-eef2-4527-a1d0-cf64870c7298-registry-tls\") pod \"image-registry-66df7c8f76-r5kgs\" (UID: \"b603a066-eef2-4527-a1d0-cf64870c7298\") " pod="openshift-image-registry/image-registry-66df7c8f76-r5kgs" Nov 24 01:20:48 crc kubenswrapper[4755]: I1124 01:20:48.982091 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b603a066-eef2-4527-a1d0-cf64870c7298-registry-certificates\") pod \"image-registry-66df7c8f76-r5kgs\" (UID: \"b603a066-eef2-4527-a1d0-cf64870c7298\") " pod="openshift-image-registry/image-registry-66df7c8f76-r5kgs" Nov 24 01:20:48 crc kubenswrapper[4755]: I1124 01:20:48.982124 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fh992\" (UniqueName: \"kubernetes.io/projected/b603a066-eef2-4527-a1d0-cf64870c7298-kube-api-access-fh992\") pod \"image-registry-66df7c8f76-r5kgs\" (UID: \"b603a066-eef2-4527-a1d0-cf64870c7298\") " pod="openshift-image-registry/image-registry-66df7c8f76-r5kgs" Nov 24 01:20:48 crc kubenswrapper[4755]: I1124 01:20:48.982191 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b603a066-eef2-4527-a1d0-cf64870c7298-installation-pull-secrets\") pod \"image-registry-66df7c8f76-r5kgs\" (UID: \"b603a066-eef2-4527-a1d0-cf64870c7298\") " pod="openshift-image-registry/image-registry-66df7c8f76-r5kgs" Nov 24 01:20:48 crc kubenswrapper[4755]: I1124 01:20:48.982298 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b603a066-eef2-4527-a1d0-cf64870c7298-ca-trust-extracted\") pod \"image-registry-66df7c8f76-r5kgs\" (UID: \"b603a066-eef2-4527-a1d0-cf64870c7298\") " pod="openshift-image-registry/image-registry-66df7c8f76-r5kgs" Nov 24 01:20:48 crc kubenswrapper[4755]: I1124 01:20:48.982359 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/b603a066-eef2-4527-a1d0-cf64870c7298-trusted-ca\") pod \"image-registry-66df7c8f76-r5kgs\" (UID: \"b603a066-eef2-4527-a1d0-cf64870c7298\") " pod="openshift-image-registry/image-registry-66df7c8f76-r5kgs" Nov 24 01:20:49 crc kubenswrapper[4755]: I1124 01:20:49.004226 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-r5kgs\" (UID: \"b603a066-eef2-4527-a1d0-cf64870c7298\") " pod="openshift-image-registry/image-registry-66df7c8f76-r5kgs" Nov 24 01:20:49 crc kubenswrapper[4755]: I1124 01:20:49.084016 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b603a066-eef2-4527-a1d0-cf64870c7298-installation-pull-secrets\") pod \"image-registry-66df7c8f76-r5kgs\" (UID: \"b603a066-eef2-4527-a1d0-cf64870c7298\") " pod="openshift-image-registry/image-registry-66df7c8f76-r5kgs" Nov 24 01:20:49 crc kubenswrapper[4755]: I1124 01:20:49.084081 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b603a066-eef2-4527-a1d0-cf64870c7298-ca-trust-extracted\") pod \"image-registry-66df7c8f76-r5kgs\" (UID: \"b603a066-eef2-4527-a1d0-cf64870c7298\") " pod="openshift-image-registry/image-registry-66df7c8f76-r5kgs" Nov 24 01:20:49 crc kubenswrapper[4755]: I1124 01:20:49.084123 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b603a066-eef2-4527-a1d0-cf64870c7298-trusted-ca\") pod \"image-registry-66df7c8f76-r5kgs\" (UID: \"b603a066-eef2-4527-a1d0-cf64870c7298\") " pod="openshift-image-registry/image-registry-66df7c8f76-r5kgs" Nov 24 01:20:49 crc kubenswrapper[4755]: I1124 01:20:49.084154 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b603a066-eef2-4527-a1d0-cf64870c7298-bound-sa-token\") pod \"image-registry-66df7c8f76-r5kgs\" (UID: \"b603a066-eef2-4527-a1d0-cf64870c7298\") " pod="openshift-image-registry/image-registry-66df7c8f76-r5kgs" Nov 24 01:20:49 crc kubenswrapper[4755]: I1124 01:20:49.084176 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b603a066-eef2-4527-a1d0-cf64870c7298-registry-tls\") pod \"image-registry-66df7c8f76-r5kgs\" (UID: \"b603a066-eef2-4527-a1d0-cf64870c7298\") " pod="openshift-image-registry/image-registry-66df7c8f76-r5kgs" Nov 24 01:20:49 crc kubenswrapper[4755]: I1124 01:20:49.084205 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b603a066-eef2-4527-a1d0-cf64870c7298-registry-certificates\") pod \"image-registry-66df7c8f76-r5kgs\" (UID: \"b603a066-eef2-4527-a1d0-cf64870c7298\") " pod="openshift-image-registry/image-registry-66df7c8f76-r5kgs" Nov 24 01:20:49 crc kubenswrapper[4755]: I1124 01:20:49.084221 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fh992\" (UniqueName: \"kubernetes.io/projected/b603a066-eef2-4527-a1d0-cf64870c7298-kube-api-access-fh992\") pod \"image-registry-66df7c8f76-r5kgs\" (UID: \"b603a066-eef2-4527-a1d0-cf64870c7298\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-r5kgs" Nov 24 01:20:49 crc kubenswrapper[4755]: I1124 01:20:49.085635 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b603a066-eef2-4527-a1d0-cf64870c7298-registry-certificates\") pod \"image-registry-66df7c8f76-r5kgs\" (UID: \"b603a066-eef2-4527-a1d0-cf64870c7298\") " pod="openshift-image-registry/image-registry-66df7c8f76-r5kgs" Nov 24 01:20:49 crc kubenswrapper[4755]: I1124 01:20:49.085646 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b603a066-eef2-4527-a1d0-cf64870c7298-ca-trust-extracted\") pod \"image-registry-66df7c8f76-r5kgs\" (UID: \"b603a066-eef2-4527-a1d0-cf64870c7298\") " pod="openshift-image-registry/image-registry-66df7c8f76-r5kgs" Nov 24 01:20:49 crc kubenswrapper[4755]: I1124 01:20:49.085814 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b603a066-eef2-4527-a1d0-cf64870c7298-trusted-ca\") pod \"image-registry-66df7c8f76-r5kgs\" (UID: \"b603a066-eef2-4527-a1d0-cf64870c7298\") " pod="openshift-image-registry/image-registry-66df7c8f76-r5kgs" Nov 24 01:20:49 crc kubenswrapper[4755]: I1124 01:20:49.090456 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b603a066-eef2-4527-a1d0-cf64870c7298-registry-tls\") pod \"image-registry-66df7c8f76-r5kgs\" (UID: \"b603a066-eef2-4527-a1d0-cf64870c7298\") " pod="openshift-image-registry/image-registry-66df7c8f76-r5kgs" Nov 24 01:20:49 crc kubenswrapper[4755]: I1124 01:20:49.091789 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b603a066-eef2-4527-a1d0-cf64870c7298-installation-pull-secrets\") pod \"image-registry-66df7c8f76-r5kgs\" (UID: \"b603a066-eef2-4527-a1d0-cf64870c7298\") " pod="openshift-image-registry/image-registry-66df7c8f76-r5kgs" Nov 24 01:20:49 crc kubenswrapper[4755]: I1124 01:20:49.099087 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fh992\" (UniqueName: \"kubernetes.io/projected/b603a066-eef2-4527-a1d0-cf64870c7298-kube-api-access-fh992\") pod \"image-registry-66df7c8f76-r5kgs\" (UID: \"b603a066-eef2-4527-a1d0-cf64870c7298\") " pod="openshift-image-registry/image-registry-66df7c8f76-r5kgs" Nov 24 01:20:49 crc kubenswrapper[4755]: I1124 01:20:49.107565 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b603a066-eef2-4527-a1d0-cf64870c7298-bound-sa-token\") pod \"image-registry-66df7c8f76-r5kgs\" (UID: \"b603a066-eef2-4527-a1d0-cf64870c7298\") " pod="openshift-image-registry/image-registry-66df7c8f76-r5kgs" Nov 24 01:20:49 crc kubenswrapper[4755]: I1124 01:20:49.197712 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-r5kgs" Nov 24 01:20:49 crc kubenswrapper[4755]: I1124 01:20:49.410692 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-r5kgs"] Nov 24 01:20:49 crc kubenswrapper[4755]: W1124 01:20:49.419334 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb603a066_eef2_4527_a1d0_cf64870c7298.slice/crio-cb5bd5bcf6f712413110dd8aff158d40eb818f1b2dcd2f8b15eb2b78633b38c7 WatchSource:0}: Error finding container cb5bd5bcf6f712413110dd8aff158d40eb818f1b2dcd2f8b15eb2b78633b38c7: Status 404 returned error can't find the container with id cb5bd5bcf6f712413110dd8aff158d40eb818f1b2dcd2f8b15eb2b78633b38c7 Nov 24 01:20:49 crc kubenswrapper[4755]: I1124 01:20:49.742381 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-r5kgs" event={"ID":"b603a066-eef2-4527-a1d0-cf64870c7298","Type":"ContainerStarted","Data":"d8d749f4a6f322d5c206e299b61b8b48ec1ebdafa901d8f187eb54e5d14ba3fe"} Nov 24 01:20:49 crc kubenswrapper[4755]: I1124 01:20:49.742466 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-r5kgs" event={"ID":"b603a066-eef2-4527-a1d0-cf64870c7298","Type":"ContainerStarted","Data":"cb5bd5bcf6f712413110dd8aff158d40eb818f1b2dcd2f8b15eb2b78633b38c7"} Nov 24 01:20:49 crc kubenswrapper[4755]: I1124 01:20:49.742534 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-r5kgs" Nov 24 01:20:49 crc kubenswrapper[4755]: I1124 01:20:49.765230 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-r5kgs" podStartSLOduration=1.7652056360000001 podStartE2EDuration="1.765205636s" podCreationTimestamp="2025-11-24 01:20:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:20:49.757243269 +0000 UTC m=+474.443308850" watchObservedRunningTime="2025-11-24 01:20:49.765205636 +0000 UTC m=+474.451271177" Nov 24 01:21:09 crc kubenswrapper[4755]: I1124 01:21:09.204066 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-r5kgs" Nov 24 01:21:09 crc kubenswrapper[4755]: I1124 01:21:09.272580 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-vxz4v"] Nov 24 01:21:33 crc kubenswrapper[4755]: I1124 01:21:33.295493 4755 patch_prober.go:28] interesting pod/machine-config-daemon-h8xzm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 01:21:33 crc kubenswrapper[4755]: I1124 01:21:33.296192 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 01:21:34 crc kubenswrapper[4755]: I1124 01:21:34.320545 4755 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" podUID="2ca81542-2eef-4099-92bd-301845e4d3c8" containerName="registry" containerID="cri-o://bf781a8377cbb3022847bfee6df8781331b9f904d9313a0e301bc7b29fe40f65" gracePeriod=30 Nov 24 01:21:34 crc kubenswrapper[4755]: I1124 01:21:34.749812 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:21:34 crc kubenswrapper[4755]: I1124 01:21:34.841637 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2ca81542-2eef-4099-92bd-301845e4d3c8-bound-sa-token\") pod \"2ca81542-2eef-4099-92bd-301845e4d3c8\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " Nov 24 01:21:34 crc kubenswrapper[4755]: I1124 01:21:34.841750 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2ca81542-2eef-4099-92bd-301845e4d3c8-registry-certificates\") pod \"2ca81542-2eef-4099-92bd-301845e4d3c8\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " Nov 24 01:21:34 crc kubenswrapper[4755]: I1124 01:21:34.841789 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2ca81542-2eef-4099-92bd-301845e4d3c8-installation-pull-secrets\") pod \"2ca81542-2eef-4099-92bd-301845e4d3c8\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " Nov 24 01:21:34 crc kubenswrapper[4755]: I1124 01:21:34.841816 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rrd5w\" (UniqueName: \"kubernetes.io/projected/2ca81542-2eef-4099-92bd-301845e4d3c8-kube-api-access-rrd5w\") pod \"2ca81542-2eef-4099-92bd-301845e4d3c8\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " Nov 24 01:21:34 crc kubenswrapper[4755]: I1124 01:21:34.842060 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"2ca81542-2eef-4099-92bd-301845e4d3c8\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " Nov 24 01:21:34 crc kubenswrapper[4755]: I1124 01:21:34.842132 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2ca81542-2eef-4099-92bd-301845e4d3c8-trusted-ca\") pod \"2ca81542-2eef-4099-92bd-301845e4d3c8\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " Nov 24 01:21:34 crc kubenswrapper[4755]: I1124 01:21:34.842162 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2ca81542-2eef-4099-92bd-301845e4d3c8-ca-trust-extracted\") pod \"2ca81542-2eef-4099-92bd-301845e4d3c8\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " Nov 24 01:21:34 crc kubenswrapper[4755]: I1124 01:21:34.842193 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/2ca81542-2eef-4099-92bd-301845e4d3c8-registry-tls\") pod \"2ca81542-2eef-4099-92bd-301845e4d3c8\" (UID: \"2ca81542-2eef-4099-92bd-301845e4d3c8\") " Nov 24 01:21:34 crc kubenswrapper[4755]: I1124 01:21:34.842764 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/2ca81542-2eef-4099-92bd-301845e4d3c8-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "2ca81542-2eef-4099-92bd-301845e4d3c8" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:21:34 crc kubenswrapper[4755]: I1124 01:21:34.843417 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2ca81542-2eef-4099-92bd-301845e4d3c8-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "2ca81542-2eef-4099-92bd-301845e4d3c8" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:21:34 crc kubenswrapper[4755]: I1124 01:21:34.852095 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ca81542-2eef-4099-92bd-301845e4d3c8-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "2ca81542-2eef-4099-92bd-301845e4d3c8" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:21:34 crc kubenswrapper[4755]: I1124 01:21:34.852783 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ca81542-2eef-4099-92bd-301845e4d3c8-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "2ca81542-2eef-4099-92bd-301845e4d3c8" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:21:34 crc kubenswrapper[4755]: I1124 01:21:34.853021 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ca81542-2eef-4099-92bd-301845e4d3c8-kube-api-access-rrd5w" (OuterVolumeSpecName: "kube-api-access-rrd5w") pod "2ca81542-2eef-4099-92bd-301845e4d3c8" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8"). InnerVolumeSpecName "kube-api-access-rrd5w". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:21:34 crc kubenswrapper[4755]: I1124 01:21:34.853167 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ca81542-2eef-4099-92bd-301845e4d3c8-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "2ca81542-2eef-4099-92bd-301845e4d3c8" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:21:34 crc kubenswrapper[4755]: I1124 01:21:34.858525 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "2ca81542-2eef-4099-92bd-301845e4d3c8" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Nov 24 01:21:34 crc kubenswrapper[4755]: I1124 01:21:34.877577 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2ca81542-2eef-4099-92bd-301845e4d3c8-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "2ca81542-2eef-4099-92bd-301845e4d3c8" (UID: "2ca81542-2eef-4099-92bd-301845e4d3c8"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:21:34 crc kubenswrapper[4755]: I1124 01:21:34.943924 4755 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2ca81542-2eef-4099-92bd-301845e4d3c8-registry-certificates\") on node \"crc\" DevicePath \"\"" Nov 24 01:21:34 crc kubenswrapper[4755]: I1124 01:21:34.943987 4755 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2ca81542-2eef-4099-92bd-301845e4d3c8-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Nov 24 01:21:34 crc kubenswrapper[4755]: I1124 01:21:34.944009 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rrd5w\" (UniqueName: \"kubernetes.io/projected/2ca81542-2eef-4099-92bd-301845e4d3c8-kube-api-access-rrd5w\") on node \"crc\" DevicePath \"\"" Nov 24 01:21:34 crc kubenswrapper[4755]: I1124 01:21:34.944032 4755 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2ca81542-2eef-4099-92bd-301845e4d3c8-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 24 01:21:34 crc kubenswrapper[4755]: I1124 01:21:34.944053 4755 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2ca81542-2eef-4099-92bd-301845e4d3c8-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Nov 24 01:21:34 crc kubenswrapper[4755]: I1124 01:21:34.944071 4755 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/2ca81542-2eef-4099-92bd-301845e4d3c8-registry-tls\") on node \"crc\" DevicePath \"\"" Nov 24 01:21:34 crc kubenswrapper[4755]: I1124 01:21:34.944087 4755 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2ca81542-2eef-4099-92bd-301845e4d3c8-bound-sa-token\") on node \"crc\" DevicePath \"\"" Nov 24 01:21:35 crc kubenswrapper[4755]: I1124 01:21:35.199318 4755 generic.go:334] "Generic (PLEG): container finished" podID="2ca81542-2eef-4099-92bd-301845e4d3c8" containerID="bf781a8377cbb3022847bfee6df8781331b9f904d9313a0e301bc7b29fe40f65" exitCode=0 Nov 24 01:21:35 crc kubenswrapper[4755]: I1124 01:21:35.199411 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" event={"ID":"2ca81542-2eef-4099-92bd-301845e4d3c8","Type":"ContainerDied","Data":"bf781a8377cbb3022847bfee6df8781331b9f904d9313a0e301bc7b29fe40f65"} Nov 24 01:21:35 crc kubenswrapper[4755]: I1124 01:21:35.199446 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" event={"ID":"2ca81542-2eef-4099-92bd-301845e4d3c8","Type":"ContainerDied","Data":"22cda212408d1d60b8a969141e808666097cf87d6372dab54547d6b93ea312f4"} Nov 24 01:21:35 crc kubenswrapper[4755]: I1124 01:21:35.199468 4755 scope.go:117] "RemoveContainer" containerID="bf781a8377cbb3022847bfee6df8781331b9f904d9313a0e301bc7b29fe40f65" Nov 24 01:21:35 crc kubenswrapper[4755]: I1124 01:21:35.199707 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-vxz4v" Nov 24 01:21:35 crc kubenswrapper[4755]: I1124 01:21:35.234898 4755 scope.go:117] "RemoveContainer" containerID="bf781a8377cbb3022847bfee6df8781331b9f904d9313a0e301bc7b29fe40f65" Nov 24 01:21:35 crc kubenswrapper[4755]: E1124 01:21:35.235838 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bf781a8377cbb3022847bfee6df8781331b9f904d9313a0e301bc7b29fe40f65\": container with ID starting with bf781a8377cbb3022847bfee6df8781331b9f904d9313a0e301bc7b29fe40f65 not found: ID does not exist" containerID="bf781a8377cbb3022847bfee6df8781331b9f904d9313a0e301bc7b29fe40f65" Nov 24 01:21:35 crc kubenswrapper[4755]: I1124 01:21:35.235975 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf781a8377cbb3022847bfee6df8781331b9f904d9313a0e301bc7b29fe40f65"} err="failed to get container status \"bf781a8377cbb3022847bfee6df8781331b9f904d9313a0e301bc7b29fe40f65\": rpc error: code = NotFound desc = could not find container \"bf781a8377cbb3022847bfee6df8781331b9f904d9313a0e301bc7b29fe40f65\": container with ID starting with bf781a8377cbb3022847bfee6df8781331b9f904d9313a0e301bc7b29fe40f65 not found: ID does not exist" Nov 24 01:21:35 crc kubenswrapper[4755]: I1124 01:21:35.266781 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-vxz4v"] Nov 24 01:21:35 crc kubenswrapper[4755]: I1124 01:21:35.272035 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-vxz4v"] Nov 24 01:21:36 crc kubenswrapper[4755]: I1124 01:21:36.008074 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ca81542-2eef-4099-92bd-301845e4d3c8" path="/var/lib/kubelet/pods/2ca81542-2eef-4099-92bd-301845e4d3c8/volumes" Nov 24 01:21:56 crc kubenswrapper[4755]: I1124 01:21:56.176448 4755 scope.go:117] "RemoveContainer" containerID="9c12c724b641079f8d6a3fd5cda733a9bf101ea5c962843af955b5f0399f567a" Nov 24 01:22:03 crc kubenswrapper[4755]: I1124 01:22:03.295416 4755 patch_prober.go:28] interesting pod/machine-config-daemon-h8xzm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 01:22:03 crc kubenswrapper[4755]: I1124 01:22:03.296061 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 01:22:08 crc kubenswrapper[4755]: I1124 01:22:08.469535 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-kvcl8"] Nov 24 01:22:08 crc kubenswrapper[4755]: E1124 01:22:08.470186 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ca81542-2eef-4099-92bd-301845e4d3c8" containerName="registry" Nov 24 01:22:08 crc kubenswrapper[4755]: I1124 01:22:08.470205 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ca81542-2eef-4099-92bd-301845e4d3c8" containerName="registry" Nov 24 01:22:08 crc kubenswrapper[4755]: I1124 01:22:08.470345 4755 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="2ca81542-2eef-4099-92bd-301845e4d3c8" containerName="registry" Nov 24 01:22:08 crc kubenswrapper[4755]: I1124 01:22:08.470843 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-kvcl8" Nov 24 01:22:08 crc kubenswrapper[4755]: I1124 01:22:08.473091 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Nov 24 01:22:08 crc kubenswrapper[4755]: I1124 01:22:08.473295 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Nov 24 01:22:08 crc kubenswrapper[4755]: I1124 01:22:08.473423 4755 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-phcmf" Nov 24 01:22:08 crc kubenswrapper[4755]: I1124 01:22:08.490297 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-kvcl8"] Nov 24 01:22:08 crc kubenswrapper[4755]: I1124 01:22:08.495106 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-t6d9j"] Nov 24 01:22:08 crc kubenswrapper[4755]: I1124 01:22:08.495936 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-t6d9j" Nov 24 01:22:08 crc kubenswrapper[4755]: I1124 01:22:08.497773 4755 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-lc9zg" Nov 24 01:22:08 crc kubenswrapper[4755]: I1124 01:22:08.499317 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-vwz4w"] Nov 24 01:22:08 crc kubenswrapper[4755]: I1124 01:22:08.500156 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-vwz4w" Nov 24 01:22:08 crc kubenswrapper[4755]: I1124 01:22:08.506026 4755 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-bzjfv" Nov 24 01:22:08 crc kubenswrapper[4755]: I1124 01:22:08.517646 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-vwz4w"] Nov 24 01:22:08 crc kubenswrapper[4755]: I1124 01:22:08.520769 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-t6d9j"] Nov 24 01:22:08 crc kubenswrapper[4755]: I1124 01:22:08.566676 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rgvx9\" (UniqueName: \"kubernetes.io/projected/6c7e5cd9-3286-4a20-849c-92cc8c910e10-kube-api-access-rgvx9\") pod \"cert-manager-webhook-5655c58dd6-t6d9j\" (UID: \"6c7e5cd9-3286-4a20-849c-92cc8c910e10\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-t6d9j" Nov 24 01:22:08 crc kubenswrapper[4755]: I1124 01:22:08.566753 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bt6fk\" (UniqueName: \"kubernetes.io/projected/844c92d0-68cd-410c-ba95-f440eb5bfcfc-kube-api-access-bt6fk\") pod \"cert-manager-cainjector-7f985d654d-kvcl8\" (UID: \"844c92d0-68cd-410c-ba95-f440eb5bfcfc\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-kvcl8" Nov 24 01:22:08 crc kubenswrapper[4755]: I1124 01:22:08.566841 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gdjc2\" (UniqueName: \"kubernetes.io/projected/4b370b30-6433-4155-be26-46a905bb6b3d-kube-api-access-gdjc2\") pod \"cert-manager-5b446d88c5-vwz4w\" (UID: \"4b370b30-6433-4155-be26-46a905bb6b3d\") " pod="cert-manager/cert-manager-5b446d88c5-vwz4w" Nov 24 01:22:08 crc kubenswrapper[4755]: I1124 01:22:08.667989 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gdjc2\" (UniqueName: \"kubernetes.io/projected/4b370b30-6433-4155-be26-46a905bb6b3d-kube-api-access-gdjc2\") pod \"cert-manager-5b446d88c5-vwz4w\" (UID: \"4b370b30-6433-4155-be26-46a905bb6b3d\") " pod="cert-manager/cert-manager-5b446d88c5-vwz4w" Nov 24 01:22:08 crc kubenswrapper[4755]: I1124 01:22:08.668057 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rgvx9\" (UniqueName: \"kubernetes.io/projected/6c7e5cd9-3286-4a20-849c-92cc8c910e10-kube-api-access-rgvx9\") pod \"cert-manager-webhook-5655c58dd6-t6d9j\" (UID: \"6c7e5cd9-3286-4a20-849c-92cc8c910e10\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-t6d9j" Nov 24 01:22:08 crc kubenswrapper[4755]: I1124 01:22:08.668105 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bt6fk\" (UniqueName: \"kubernetes.io/projected/844c92d0-68cd-410c-ba95-f440eb5bfcfc-kube-api-access-bt6fk\") pod \"cert-manager-cainjector-7f985d654d-kvcl8\" (UID: \"844c92d0-68cd-410c-ba95-f440eb5bfcfc\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-kvcl8" Nov 24 01:22:08 crc kubenswrapper[4755]: I1124 01:22:08.690381 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bt6fk\" (UniqueName: \"kubernetes.io/projected/844c92d0-68cd-410c-ba95-f440eb5bfcfc-kube-api-access-bt6fk\") pod \"cert-manager-cainjector-7f985d654d-kvcl8\" (UID: \"844c92d0-68cd-410c-ba95-f440eb5bfcfc\") " 
pod="cert-manager/cert-manager-cainjector-7f985d654d-kvcl8" Nov 24 01:22:08 crc kubenswrapper[4755]: I1124 01:22:08.691970 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gdjc2\" (UniqueName: \"kubernetes.io/projected/4b370b30-6433-4155-be26-46a905bb6b3d-kube-api-access-gdjc2\") pod \"cert-manager-5b446d88c5-vwz4w\" (UID: \"4b370b30-6433-4155-be26-46a905bb6b3d\") " pod="cert-manager/cert-manager-5b446d88c5-vwz4w" Nov 24 01:22:08 crc kubenswrapper[4755]: I1124 01:22:08.692373 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rgvx9\" (UniqueName: \"kubernetes.io/projected/6c7e5cd9-3286-4a20-849c-92cc8c910e10-kube-api-access-rgvx9\") pod \"cert-manager-webhook-5655c58dd6-t6d9j\" (UID: \"6c7e5cd9-3286-4a20-849c-92cc8c910e10\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-t6d9j" Nov 24 01:22:08 crc kubenswrapper[4755]: I1124 01:22:08.786312 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-kvcl8" Nov 24 01:22:08 crc kubenswrapper[4755]: I1124 01:22:08.814065 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-t6d9j" Nov 24 01:22:08 crc kubenswrapper[4755]: I1124 01:22:08.820141 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-vwz4w" Nov 24 01:22:09 crc kubenswrapper[4755]: I1124 01:22:09.066809 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-t6d9j"] Nov 24 01:22:09 crc kubenswrapper[4755]: I1124 01:22:09.077098 4755 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 24 01:22:09 crc kubenswrapper[4755]: I1124 01:22:09.107793 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-vwz4w"] Nov 24 01:22:09 crc kubenswrapper[4755]: W1124 01:22:09.114925 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4b370b30_6433_4155_be26_46a905bb6b3d.slice/crio-edebbe2e5f2a1fa5a86e61421679a1dd8844246e2644ddbd93fd6241409a270b WatchSource:0}: Error finding container edebbe2e5f2a1fa5a86e61421679a1dd8844246e2644ddbd93fd6241409a270b: Status 404 returned error can't find the container with id edebbe2e5f2a1fa5a86e61421679a1dd8844246e2644ddbd93fd6241409a270b Nov 24 01:22:09 crc kubenswrapper[4755]: I1124 01:22:09.229001 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-kvcl8"] Nov 24 01:22:09 crc kubenswrapper[4755]: W1124 01:22:09.234745 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod844c92d0_68cd_410c_ba95_f440eb5bfcfc.slice/crio-1cbf7a1de300d3ec12a17c889244a150faee816b0e6f050d181e997ffdcf3c06 WatchSource:0}: Error finding container 1cbf7a1de300d3ec12a17c889244a150faee816b0e6f050d181e997ffdcf3c06: Status 404 returned error can't find the container with id 1cbf7a1de300d3ec12a17c889244a150faee816b0e6f050d181e997ffdcf3c06 Nov 24 01:22:09 crc kubenswrapper[4755]: I1124 01:22:09.464348 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-kvcl8" 
event={"ID":"844c92d0-68cd-410c-ba95-f440eb5bfcfc","Type":"ContainerStarted","Data":"1cbf7a1de300d3ec12a17c889244a150faee816b0e6f050d181e997ffdcf3c06"} Nov 24 01:22:09 crc kubenswrapper[4755]: I1124 01:22:09.466021 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-vwz4w" event={"ID":"4b370b30-6433-4155-be26-46a905bb6b3d","Type":"ContainerStarted","Data":"edebbe2e5f2a1fa5a86e61421679a1dd8844246e2644ddbd93fd6241409a270b"} Nov 24 01:22:09 crc kubenswrapper[4755]: I1124 01:22:09.466968 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-t6d9j" event={"ID":"6c7e5cd9-3286-4a20-849c-92cc8c910e10","Type":"ContainerStarted","Data":"87034d1b7069d5355473cc201b07dc1253a9888791b8a9f1493652255a619fc2"} Nov 24 01:22:13 crc kubenswrapper[4755]: I1124 01:22:13.490405 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-vwz4w" event={"ID":"4b370b30-6433-4155-be26-46a905bb6b3d","Type":"ContainerStarted","Data":"b2f5c545bbd2a63b4aff97c7b6ca141e0407be32e3cc7b590fd0145dfafe179a"} Nov 24 01:22:13 crc kubenswrapper[4755]: I1124 01:22:13.492826 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-t6d9j" event={"ID":"6c7e5cd9-3286-4a20-849c-92cc8c910e10","Type":"ContainerStarted","Data":"49b9c5cebc9f9363b8cf0791aa11052d3c992512beb72bce805b5a9064a18337"} Nov 24 01:22:13 crc kubenswrapper[4755]: I1124 01:22:13.492961 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-t6d9j" Nov 24 01:22:13 crc kubenswrapper[4755]: I1124 01:22:13.499020 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-kvcl8" event={"ID":"844c92d0-68cd-410c-ba95-f440eb5bfcfc","Type":"ContainerStarted","Data":"4cd0af33cf61f8e7ed168d3557b03b4c17640b25690a2bad08701b7d9c483ec1"} Nov 24 01:22:13 crc kubenswrapper[4755]: I1124 01:22:13.523040 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-vwz4w" podStartSLOduration=2.140460934 podStartE2EDuration="5.523015826s" podCreationTimestamp="2025-11-24 01:22:08 +0000 UTC" firstStartedPulling="2025-11-24 01:22:09.116966683 +0000 UTC m=+553.803032184" lastFinishedPulling="2025-11-24 01:22:12.499521575 +0000 UTC m=+557.185587076" observedRunningTime="2025-11-24 01:22:13.51567795 +0000 UTC m=+558.201743491" watchObservedRunningTime="2025-11-24 01:22:13.523015826 +0000 UTC m=+558.209081327" Nov 24 01:22:13 crc kubenswrapper[4755]: I1124 01:22:13.533913 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-t6d9j" podStartSLOduration=2.06597002 podStartE2EDuration="5.533887981s" podCreationTimestamp="2025-11-24 01:22:08 +0000 UTC" firstStartedPulling="2025-11-24 01:22:09.076828245 +0000 UTC m=+553.762893756" lastFinishedPulling="2025-11-24 01:22:12.544746216 +0000 UTC m=+557.230811717" observedRunningTime="2025-11-24 01:22:13.53170062 +0000 UTC m=+558.217766161" watchObservedRunningTime="2025-11-24 01:22:13.533887981 +0000 UTC m=+558.219953522" Nov 24 01:22:18 crc kubenswrapper[4755]: I1124 01:22:18.819313 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-t6d9j" Nov 24 01:22:18 crc kubenswrapper[4755]: I1124 01:22:18.841520 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="cert-manager/cert-manager-cainjector-7f985d654d-kvcl8" podStartSLOduration=7.530808737 podStartE2EDuration="10.841489628s" podCreationTimestamp="2025-11-24 01:22:08 +0000 UTC" firstStartedPulling="2025-11-24 01:22:09.237755288 +0000 UTC m=+553.923820809" lastFinishedPulling="2025-11-24 01:22:12.548436149 +0000 UTC m=+557.234501700" observedRunningTime="2025-11-24 01:22:13.547273748 +0000 UTC m=+558.233339249" watchObservedRunningTime="2025-11-24 01:22:18.841489628 +0000 UTC m=+563.527555169" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.071581 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-4ngwk"] Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.072280 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerName="ovn-controller" containerID="cri-o://c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d" gracePeriod=30 Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.072333 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerName="nbdb" containerID="cri-o://5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad" gracePeriod=30 Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.072416 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerName="kube-rbac-proxy-node" containerID="cri-o://95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db" gracePeriod=30 Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.072441 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerName="ovn-acl-logging" containerID="cri-o://15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b" gracePeriod=30 Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.072535 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerName="northd" containerID="cri-o://d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662" gracePeriod=30 Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.072590 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerName="sbdb" containerID="cri-o://43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766" gracePeriod=30 Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.073539 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403" gracePeriod=30 Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.118163 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerName="ovnkube-controller" 
containerID="cri-o://cca5bd213ea6f69dd96cf34cf1e3fdb22b20ba529527b33302f824ac9ab8d6b4" gracePeriod=30 Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.423912 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4ngwk_b3b1d3cb-ffbd-4034-832d-6577ccf2f780/ovnkube-controller/3.log" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.427189 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4ngwk_b3b1d3cb-ffbd-4034-832d-6577ccf2f780/ovn-acl-logging/0.log" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.427748 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4ngwk_b3b1d3cb-ffbd-4034-832d-6577ccf2f780/ovn-controller/0.log" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.428266 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.482640 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-kjhj5"] Nov 24 01:22:19 crc kubenswrapper[4755]: E1124 01:22:19.486175 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerName="northd" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.486262 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerName="northd" Nov 24 01:22:19 crc kubenswrapper[4755]: E1124 01:22:19.486279 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerName="sbdb" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.486290 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerName="sbdb" Nov 24 01:22:19 crc kubenswrapper[4755]: E1124 01:22:19.486305 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerName="ovnkube-controller" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.486315 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerName="ovnkube-controller" Nov 24 01:22:19 crc kubenswrapper[4755]: E1124 01:22:19.486331 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerName="ovnkube-controller" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.486339 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerName="ovnkube-controller" Nov 24 01:22:19 crc kubenswrapper[4755]: E1124 01:22:19.486349 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerName="ovnkube-controller" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.486357 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerName="ovnkube-controller" Nov 24 01:22:19 crc kubenswrapper[4755]: E1124 01:22:19.486376 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerName="kubecfg-setup" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.486389 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerName="kubecfg-setup" Nov 24 01:22:19 crc kubenswrapper[4755]: 
E1124 01:22:19.486406 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerName="ovn-controller" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.486423 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerName="ovn-controller" Nov 24 01:22:19 crc kubenswrapper[4755]: E1124 01:22:19.486436 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerName="kube-rbac-proxy-node" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.486448 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerName="kube-rbac-proxy-node" Nov 24 01:22:19 crc kubenswrapper[4755]: E1124 01:22:19.486468 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerName="nbdb" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.486480 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerName="nbdb" Nov 24 01:22:19 crc kubenswrapper[4755]: E1124 01:22:19.486492 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerName="ovn-acl-logging" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.486503 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerName="ovn-acl-logging" Nov 24 01:22:19 crc kubenswrapper[4755]: E1124 01:22:19.486515 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerName="kube-rbac-proxy-ovn-metrics" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.486524 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerName="kube-rbac-proxy-ovn-metrics" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.486709 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerName="ovnkube-controller" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.486725 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerName="ovnkube-controller" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.486744 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerName="northd" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.486756 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerName="kube-rbac-proxy-ovn-metrics" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.486771 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerName="ovnkube-controller" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.486787 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerName="ovn-acl-logging" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.486799 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerName="kube-rbac-proxy-node" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.486814 4755 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerName="sbdb" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.486830 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerName="nbdb" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.486846 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerName="ovnkube-controller" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.486860 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerName="ovn-controller" Nov 24 01:22:19 crc kubenswrapper[4755]: E1124 01:22:19.487053 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerName="ovnkube-controller" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.487067 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerName="ovnkube-controller" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.487240 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerName="ovnkube-controller" Nov 24 01:22:19 crc kubenswrapper[4755]: E1124 01:22:19.487421 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerName="ovnkube-controller" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.487434 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerName="ovnkube-controller" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.490638 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.534070 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4ngwk_b3b1d3cb-ffbd-4034-832d-6577ccf2f780/ovnkube-controller/3.log" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.536573 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4ngwk_b3b1d3cb-ffbd-4034-832d-6577ccf2f780/ovn-acl-logging/0.log" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.537051 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4ngwk_b3b1d3cb-ffbd-4034-832d-6577ccf2f780/ovn-controller/0.log" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.537406 4755 generic.go:334] "Generic (PLEG): container finished" podID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerID="cca5bd213ea6f69dd96cf34cf1e3fdb22b20ba529527b33302f824ac9ab8d6b4" exitCode=0 Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.537436 4755 generic.go:334] "Generic (PLEG): container finished" podID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerID="43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766" exitCode=0 Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.537447 4755 generic.go:334] "Generic (PLEG): container finished" podID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerID="5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad" exitCode=0 Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.537596 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" event={"ID":"b3b1d3cb-ffbd-4034-832d-6577ccf2f780","Type":"ContainerDied","Data":"cca5bd213ea6f69dd96cf34cf1e3fdb22b20ba529527b33302f824ac9ab8d6b4"} Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.537654 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" event={"ID":"b3b1d3cb-ffbd-4034-832d-6577ccf2f780","Type":"ContainerDied","Data":"43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766"} Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.537662 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.537677 4755 scope.go:117] "RemoveContainer" containerID="cca5bd213ea6f69dd96cf34cf1e3fdb22b20ba529527b33302f824ac9ab8d6b4" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.537666 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" event={"ID":"b3b1d3cb-ffbd-4034-832d-6577ccf2f780","Type":"ContainerDied","Data":"5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad"} Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.537834 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" event={"ID":"b3b1d3cb-ffbd-4034-832d-6577ccf2f780","Type":"ContainerDied","Data":"d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662"} Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.537456 4755 generic.go:334] "Generic (PLEG): container finished" podID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerID="d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662" exitCode=0 Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538153 4755 generic.go:334] "Generic (PLEG): container finished" podID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerID="dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403" exitCode=0 Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538167 4755 generic.go:334] "Generic (PLEG): container finished" podID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerID="95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db" exitCode=0 Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538176 4755 generic.go:334] "Generic (PLEG): container finished" podID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerID="15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b" exitCode=143 Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538184 4755 generic.go:334] "Generic (PLEG): container finished" podID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" containerID="c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d" exitCode=143 Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538235 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" event={"ID":"b3b1d3cb-ffbd-4034-832d-6577ccf2f780","Type":"ContainerDied","Data":"dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403"} Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538256 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" event={"ID":"b3b1d3cb-ffbd-4034-832d-6577ccf2f780","Type":"ContainerDied","Data":"95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db"} Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538269 4755 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"722b96ad79d500044a30b065a6a5fa100a2388f7c296ae8858a3a83b881a1e0d"} Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538281 4755 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766"} Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538288 4755 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad"} 
Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538295 4755 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662"} Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538301 4755 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403"} Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538308 4755 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db"} Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538315 4755 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b"} Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538322 4755 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d"} Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538329 4755 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d"} Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538339 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" event={"ID":"b3b1d3cb-ffbd-4034-832d-6577ccf2f780","Type":"ContainerDied","Data":"15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b"} Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538349 4755 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cca5bd213ea6f69dd96cf34cf1e3fdb22b20ba529527b33302f824ac9ab8d6b4"} Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538357 4755 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"722b96ad79d500044a30b065a6a5fa100a2388f7c296ae8858a3a83b881a1e0d"} Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538364 4755 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766"} Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538371 4755 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad"} Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538378 4755 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662"} Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538384 4755 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403"} Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538391 4755 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db"} 
Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538398 4755 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b"} Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538404 4755 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d"} Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538410 4755 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d"} Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538420 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" event={"ID":"b3b1d3cb-ffbd-4034-832d-6577ccf2f780","Type":"ContainerDied","Data":"c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d"} Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538430 4755 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cca5bd213ea6f69dd96cf34cf1e3fdb22b20ba529527b33302f824ac9ab8d6b4"} Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538438 4755 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"722b96ad79d500044a30b065a6a5fa100a2388f7c296ae8858a3a83b881a1e0d"} Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538445 4755 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766"} Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538452 4755 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad"} Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538459 4755 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662"} Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538466 4755 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403"} Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538474 4755 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db"} Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538481 4755 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b"} Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538489 4755 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d"} Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538497 4755 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d"} 
Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538507 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4ngwk" event={"ID":"b3b1d3cb-ffbd-4034-832d-6577ccf2f780","Type":"ContainerDied","Data":"7fb662d2864d7cf4a1f4f1c263683e549f040a12ab6ec7b20aeb38ea0616ec7c"} Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538518 4755 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cca5bd213ea6f69dd96cf34cf1e3fdb22b20ba529527b33302f824ac9ab8d6b4"} Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538527 4755 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"722b96ad79d500044a30b065a6a5fa100a2388f7c296ae8858a3a83b881a1e0d"} Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538534 4755 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766"} Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538541 4755 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad"} Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538548 4755 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662"} Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538557 4755 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403"} Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538564 4755 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db"} Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538570 4755 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b"} Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538578 4755 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d"} Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.538584 4755 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d"} Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.541290 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8pm69_19dbf7ff-f684-4c57-803a-83b39e0705a4/kube-multus/2.log" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.542390 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8pm69_19dbf7ff-f684-4c57-803a-83b39e0705a4/kube-multus/1.log" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.542425 4755 generic.go:334] "Generic (PLEG): container finished" podID="19dbf7ff-f684-4c57-803a-83b39e0705a4" containerID="7d63755ca911a04529ff56ebf1481bbd429aeb3305588a602d374328fb47890f" exitCode=2 Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.542445 
4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-8pm69" event={"ID":"19dbf7ff-f684-4c57-803a-83b39e0705a4","Type":"ContainerDied","Data":"7d63755ca911a04529ff56ebf1481bbd429aeb3305588a602d374328fb47890f"} Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.542460 4755 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"edbc9d9876663d11869ff6269682427495c7d205d739e946c354595876274685"} Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.542466 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-node-log\") pod \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.542504 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-env-overrides\") pod \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.542543 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-host-kubelet\") pod \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.542559 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-host-run-netns\") pod \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.542578 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-host-slash\") pod \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.542679 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-ovnkube-script-lib\") pod \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.542698 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-ovnkube-config\") pod \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.542727 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-run-systemd\") pod \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.542734 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod 
"b3b1d3cb-ffbd-4034-832d-6577ccf2f780" (UID: "b3b1d3cb-ffbd-4034-832d-6577ccf2f780"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.542742 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-host-cni-netd\") pod \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.542764 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "b3b1d3cb-ffbd-4034-832d-6577ccf2f780" (UID: "b3b1d3cb-ffbd-4034-832d-6577ccf2f780"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.542755 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-node-log" (OuterVolumeSpecName: "node-log") pod "b3b1d3cb-ffbd-4034-832d-6577ccf2f780" (UID: "b3b1d3cb-ffbd-4034-832d-6577ccf2f780"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.542778 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-host-var-lib-cni-networks-ovn-kubernetes\") pod \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.542796 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "b3b1d3cb-ffbd-4034-832d-6577ccf2f780" (UID: "b3b1d3cb-ffbd-4034-832d-6577ccf2f780"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.542826 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-run-ovn\") pod \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.542881 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-host-cni-bin\") pod \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.542883 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "b3b1d3cb-ffbd-4034-832d-6577ccf2f780" (UID: "b3b1d3cb-ffbd-4034-832d-6577ccf2f780"). InnerVolumeSpecName "host-run-netns". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.542931 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "b3b1d3cb-ffbd-4034-832d-6577ccf2f780" (UID: "b3b1d3cb-ffbd-4034-832d-6577ccf2f780"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.542971 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "b3b1d3cb-ffbd-4034-832d-6577ccf2f780" (UID: "b3b1d3cb-ffbd-4034-832d-6577ccf2f780"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.543058 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "b3b1d3cb-ffbd-4034-832d-6577ccf2f780" (UID: "b3b1d3cb-ffbd-4034-832d-6577ccf2f780"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.543113 4755 scope.go:117] "RemoveContainer" containerID="7d63755ca911a04529ff56ebf1481bbd429aeb3305588a602d374328fb47890f" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.542922 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-host-run-ovn-kubernetes\") pod \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.543296 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "b3b1d3cb-ffbd-4034-832d-6577ccf2f780" (UID: "b3b1d3cb-ffbd-4034-832d-6577ccf2f780"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.543317 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "b3b1d3cb-ffbd-4034-832d-6577ccf2f780" (UID: "b3b1d3cb-ffbd-4034-832d-6577ccf2f780"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.543357 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-ovn-node-metrics-cert\") pod \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.543389 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-etc-openvswitch\") pod \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " Nov 24 01:22:19 crc kubenswrapper[4755]: E1124 01:22:19.543411 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-8pm69_openshift-multus(19dbf7ff-f684-4c57-803a-83b39e0705a4)\"" pod="openshift-multus/multus-8pm69" podUID="19dbf7ff-f684-4c57-803a-83b39e0705a4" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.543432 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-systemd-units\") pod \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.543449 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-var-lib-openvswitch\") pod \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.543491 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "b3b1d3cb-ffbd-4034-832d-6577ccf2f780" (UID: "b3b1d3cb-ffbd-4034-832d-6577ccf2f780"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.543504 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "b3b1d3cb-ffbd-4034-832d-6577ccf2f780" (UID: "b3b1d3cb-ffbd-4034-832d-6577ccf2f780"). InnerVolumeSpecName "systemd-units". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.543539 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5nhqm\" (UniqueName: \"kubernetes.io/projected/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-kube-api-access-5nhqm\") pod \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.544023 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-log-socket\") pod \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.544041 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-run-openvswitch\") pod \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\" (UID: \"b3b1d3cb-ffbd-4034-832d-6577ccf2f780\") " Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.543574 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "b3b1d3cb-ffbd-4034-832d-6577ccf2f780" (UID: "b3b1d3cb-ffbd-4034-832d-6577ccf2f780"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.542743 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-host-slash" (OuterVolumeSpecName: "host-slash") pod "b3b1d3cb-ffbd-4034-832d-6577ccf2f780" (UID: "b3b1d3cb-ffbd-4034-832d-6577ccf2f780"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.543930 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "b3b1d3cb-ffbd-4034-832d-6577ccf2f780" (UID: "b3b1d3cb-ffbd-4034-832d-6577ccf2f780"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.544124 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-log-socket" (OuterVolumeSpecName: "log-socket") pod "b3b1d3cb-ffbd-4034-832d-6577ccf2f780" (UID: "b3b1d3cb-ffbd-4034-832d-6577ccf2f780"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.544236 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "b3b1d3cb-ffbd-4034-832d-6577ccf2f780" (UID: "b3b1d3cb-ffbd-4034-832d-6577ccf2f780"). InnerVolumeSpecName "run-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.544294 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f872bc21-473a-4128-9771-3f721b40d17c-run-openvswitch\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.544316 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f872bc21-473a-4128-9771-3f721b40d17c-ovnkube-config\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.544336 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f872bc21-473a-4128-9771-3f721b40d17c-etc-openvswitch\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.544355 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f872bc21-473a-4128-9771-3f721b40d17c-node-log\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.544373 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f872bc21-473a-4128-9771-3f721b40d17c-systemd-units\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.544399 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f872bc21-473a-4128-9771-3f721b40d17c-run-ovn\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.544554 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f872bc21-473a-4128-9771-3f721b40d17c-host-run-ovn-kubernetes\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.544615 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f872bc21-473a-4128-9771-3f721b40d17c-var-lib-openvswitch\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.544634 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f872bc21-473a-4128-9771-3f721b40d17c-host-slash\") 
pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.544650 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f872bc21-473a-4128-9771-3f721b40d17c-host-cni-bin\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.544672 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f872bc21-473a-4128-9771-3f721b40d17c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.544688 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f872bc21-473a-4128-9771-3f721b40d17c-host-run-netns\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.544740 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f872bc21-473a-4128-9771-3f721b40d17c-env-overrides\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.544766 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f872bc21-473a-4128-9771-3f721b40d17c-ovnkube-script-lib\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.544793 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f872bc21-473a-4128-9771-3f721b40d17c-ovn-node-metrics-cert\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.544811 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f872bc21-473a-4128-9771-3f721b40d17c-host-kubelet\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.544863 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f872bc21-473a-4128-9771-3f721b40d17c-log-socket\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.544881 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f872bc21-473a-4128-9771-3f721b40d17c-run-systemd\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.544900 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f872bc21-473a-4128-9771-3f721b40d17c-host-cni-netd\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.544922 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fz5k5\" (UniqueName: \"kubernetes.io/projected/f872bc21-473a-4128-9771-3f721b40d17c-kube-api-access-fz5k5\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.544958 4755 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-log-socket\") on node \"crc\" DevicePath \"\"" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.544969 4755 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-run-openvswitch\") on node \"crc\" DevicePath \"\"" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.544978 4755 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-node-log\") on node \"crc\" DevicePath \"\"" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.544987 4755 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-env-overrides\") on node \"crc\" DevicePath \"\"" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.544996 4755 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-host-kubelet\") on node \"crc\" DevicePath \"\"" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.545004 4755 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-host-run-netns\") on node \"crc\" DevicePath \"\"" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.545014 4755 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-host-slash\") on node \"crc\" DevicePath \"\"" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.545023 4755 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.545032 4755 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-ovnkube-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.545041 4755 reconciler_common.go:293] 
"Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-host-cni-netd\") on node \"crc\" DevicePath \"\"" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.545050 4755 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.545060 4755 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-run-ovn\") on node \"crc\" DevicePath \"\"" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.545069 4755 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-host-cni-bin\") on node \"crc\" DevicePath \"\"" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.545079 4755 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.545088 4755 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.545097 4755 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-systemd-units\") on node \"crc\" DevicePath \"\"" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.545105 4755 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.549293 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-kube-api-access-5nhqm" (OuterVolumeSpecName: "kube-api-access-5nhqm") pod "b3b1d3cb-ffbd-4034-832d-6577ccf2f780" (UID: "b3b1d3cb-ffbd-4034-832d-6577ccf2f780"). InnerVolumeSpecName "kube-api-access-5nhqm". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.550007 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "b3b1d3cb-ffbd-4034-832d-6577ccf2f780" (UID: "b3b1d3cb-ffbd-4034-832d-6577ccf2f780"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.557782 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "b3b1d3cb-ffbd-4034-832d-6577ccf2f780" (UID: "b3b1d3cb-ffbd-4034-832d-6577ccf2f780"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.560370 4755 scope.go:117] "RemoveContainer" containerID="722b96ad79d500044a30b065a6a5fa100a2388f7c296ae8858a3a83b881a1e0d" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.579323 4755 scope.go:117] "RemoveContainer" containerID="43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.591243 4755 scope.go:117] "RemoveContainer" containerID="5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.602103 4755 scope.go:117] "RemoveContainer" containerID="d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.614235 4755 scope.go:117] "RemoveContainer" containerID="dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.625590 4755 scope.go:117] "RemoveContainer" containerID="95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.636956 4755 scope.go:117] "RemoveContainer" containerID="15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.645732 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f872bc21-473a-4128-9771-3f721b40d17c-log-socket\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.645803 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f872bc21-473a-4128-9771-3f721b40d17c-run-systemd\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.645833 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f872bc21-473a-4128-9771-3f721b40d17c-host-cni-netd\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.645842 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f872bc21-473a-4128-9771-3f721b40d17c-log-socket\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.645882 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fz5k5\" (UniqueName: \"kubernetes.io/projected/f872bc21-473a-4128-9771-3f721b40d17c-kube-api-access-fz5k5\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.645894 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f872bc21-473a-4128-9771-3f721b40d17c-run-systemd\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.645914 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f872bc21-473a-4128-9771-3f721b40d17c-run-openvswitch\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.645948 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f872bc21-473a-4128-9771-3f721b40d17c-host-cni-netd\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.645978 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f872bc21-473a-4128-9771-3f721b40d17c-run-openvswitch\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.645934 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f872bc21-473a-4128-9771-3f721b40d17c-ovnkube-config\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.646031 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f872bc21-473a-4128-9771-3f721b40d17c-etc-openvswitch\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.646059 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f872bc21-473a-4128-9771-3f721b40d17c-node-log\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.646083 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f872bc21-473a-4128-9771-3f721b40d17c-systemd-units\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.646108 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f872bc21-473a-4128-9771-3f721b40d17c-run-ovn\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.646133 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f872bc21-473a-4128-9771-3f721b40d17c-host-run-ovn-kubernetes\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.646148 4755 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f872bc21-473a-4128-9771-3f721b40d17c-node-log\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.646155 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f872bc21-473a-4128-9771-3f721b40d17c-var-lib-openvswitch\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.646178 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f872bc21-473a-4128-9771-3f721b40d17c-host-slash\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.646198 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f872bc21-473a-4128-9771-3f721b40d17c-host-cni-bin\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.646215 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f872bc21-473a-4128-9771-3f721b40d17c-etc-openvswitch\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.646219 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f872bc21-473a-4128-9771-3f721b40d17c-host-run-netns\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.646238 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f872bc21-473a-4128-9771-3f721b40d17c-host-run-ovn-kubernetes\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.646258 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f872bc21-473a-4128-9771-3f721b40d17c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.646285 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f872bc21-473a-4128-9771-3f721b40d17c-systemd-units\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.646292 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"host-slash\" (UniqueName: \"kubernetes.io/host-path/f872bc21-473a-4128-9771-3f721b40d17c-host-slash\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.646268 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f872bc21-473a-4128-9771-3f721b40d17c-var-lib-openvswitch\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.646245 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f872bc21-473a-4128-9771-3f721b40d17c-host-run-netns\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.646331 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f872bc21-473a-4128-9771-3f721b40d17c-run-ovn\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.646335 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f872bc21-473a-4128-9771-3f721b40d17c-host-cni-bin\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.646365 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f872bc21-473a-4128-9771-3f721b40d17c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.646480 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f872bc21-473a-4128-9771-3f721b40d17c-env-overrides\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.646517 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f872bc21-473a-4128-9771-3f721b40d17c-ovnkube-script-lib\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.646543 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f872bc21-473a-4128-9771-3f721b40d17c-ovn-node-metrics-cert\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.646566 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: 
\"kubernetes.io/host-path/f872bc21-473a-4128-9771-3f721b40d17c-host-kubelet\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.646636 4755 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-run-systemd\") on node \"crc\" DevicePath \"\"" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.646651 4755 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.646665 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5nhqm\" (UniqueName: \"kubernetes.io/projected/b3b1d3cb-ffbd-4034-832d-6577ccf2f780-kube-api-access-5nhqm\") on node \"crc\" DevicePath \"\"" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.646695 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f872bc21-473a-4128-9771-3f721b40d17c-host-kubelet\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.646737 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f872bc21-473a-4128-9771-3f721b40d17c-ovnkube-config\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.647815 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f872bc21-473a-4128-9771-3f721b40d17c-env-overrides\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.648016 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f872bc21-473a-4128-9771-3f721b40d17c-ovnkube-script-lib\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.653168 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f872bc21-473a-4128-9771-3f721b40d17c-ovn-node-metrics-cert\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.653872 4755 scope.go:117] "RemoveContainer" containerID="c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.661825 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fz5k5\" (UniqueName: \"kubernetes.io/projected/f872bc21-473a-4128-9771-3f721b40d17c-kube-api-access-fz5k5\") pod \"ovnkube-node-kjhj5\" (UID: \"f872bc21-473a-4128-9771-3f721b40d17c\") " pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.666884 
4755 scope.go:117] "RemoveContainer" containerID="7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.679676 4755 scope.go:117] "RemoveContainer" containerID="cca5bd213ea6f69dd96cf34cf1e3fdb22b20ba529527b33302f824ac9ab8d6b4" Nov 24 01:22:19 crc kubenswrapper[4755]: E1124 01:22:19.680013 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cca5bd213ea6f69dd96cf34cf1e3fdb22b20ba529527b33302f824ac9ab8d6b4\": container with ID starting with cca5bd213ea6f69dd96cf34cf1e3fdb22b20ba529527b33302f824ac9ab8d6b4 not found: ID does not exist" containerID="cca5bd213ea6f69dd96cf34cf1e3fdb22b20ba529527b33302f824ac9ab8d6b4" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.680044 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cca5bd213ea6f69dd96cf34cf1e3fdb22b20ba529527b33302f824ac9ab8d6b4"} err="failed to get container status \"cca5bd213ea6f69dd96cf34cf1e3fdb22b20ba529527b33302f824ac9ab8d6b4\": rpc error: code = NotFound desc = could not find container \"cca5bd213ea6f69dd96cf34cf1e3fdb22b20ba529527b33302f824ac9ab8d6b4\": container with ID starting with cca5bd213ea6f69dd96cf34cf1e3fdb22b20ba529527b33302f824ac9ab8d6b4 not found: ID does not exist" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.680064 4755 scope.go:117] "RemoveContainer" containerID="722b96ad79d500044a30b065a6a5fa100a2388f7c296ae8858a3a83b881a1e0d" Nov 24 01:22:19 crc kubenswrapper[4755]: E1124 01:22:19.680342 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"722b96ad79d500044a30b065a6a5fa100a2388f7c296ae8858a3a83b881a1e0d\": container with ID starting with 722b96ad79d500044a30b065a6a5fa100a2388f7c296ae8858a3a83b881a1e0d not found: ID does not exist" containerID="722b96ad79d500044a30b065a6a5fa100a2388f7c296ae8858a3a83b881a1e0d" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.680375 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"722b96ad79d500044a30b065a6a5fa100a2388f7c296ae8858a3a83b881a1e0d"} err="failed to get container status \"722b96ad79d500044a30b065a6a5fa100a2388f7c296ae8858a3a83b881a1e0d\": rpc error: code = NotFound desc = could not find container \"722b96ad79d500044a30b065a6a5fa100a2388f7c296ae8858a3a83b881a1e0d\": container with ID starting with 722b96ad79d500044a30b065a6a5fa100a2388f7c296ae8858a3a83b881a1e0d not found: ID does not exist" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.680401 4755 scope.go:117] "RemoveContainer" containerID="43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766" Nov 24 01:22:19 crc kubenswrapper[4755]: E1124 01:22:19.680651 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766\": container with ID starting with 43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766 not found: ID does not exist" containerID="43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.680672 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766"} err="failed to get container status 
\"43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766\": rpc error: code = NotFound desc = could not find container \"43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766\": container with ID starting with 43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766 not found: ID does not exist" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.680685 4755 scope.go:117] "RemoveContainer" containerID="5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad" Nov 24 01:22:19 crc kubenswrapper[4755]: E1124 01:22:19.680979 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad\": container with ID starting with 5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad not found: ID does not exist" containerID="5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.681004 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad"} err="failed to get container status \"5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad\": rpc error: code = NotFound desc = could not find container \"5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad\": container with ID starting with 5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad not found: ID does not exist" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.681017 4755 scope.go:117] "RemoveContainer" containerID="d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662" Nov 24 01:22:19 crc kubenswrapper[4755]: E1124 01:22:19.681278 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662\": container with ID starting with d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662 not found: ID does not exist" containerID="d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.681295 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662"} err="failed to get container status \"d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662\": rpc error: code = NotFound desc = could not find container \"d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662\": container with ID starting with d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662 not found: ID does not exist" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.681305 4755 scope.go:117] "RemoveContainer" containerID="dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403" Nov 24 01:22:19 crc kubenswrapper[4755]: E1124 01:22:19.681492 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403\": container with ID starting with dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403 not found: ID does not exist" containerID="dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.681511 4755 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403"} err="failed to get container status \"dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403\": rpc error: code = NotFound desc = could not find container \"dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403\": container with ID starting with dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403 not found: ID does not exist" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.681522 4755 scope.go:117] "RemoveContainer" containerID="95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db" Nov 24 01:22:19 crc kubenswrapper[4755]: E1124 01:22:19.681703 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db\": container with ID starting with 95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db not found: ID does not exist" containerID="95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.681720 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db"} err="failed to get container status \"95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db\": rpc error: code = NotFound desc = could not find container \"95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db\": container with ID starting with 95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db not found: ID does not exist" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.681731 4755 scope.go:117] "RemoveContainer" containerID="15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b" Nov 24 01:22:19 crc kubenswrapper[4755]: E1124 01:22:19.681944 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b\": container with ID starting with 15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b not found: ID does not exist" containerID="15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.681967 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b"} err="failed to get container status \"15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b\": rpc error: code = NotFound desc = could not find container \"15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b\": container with ID starting with 15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b not found: ID does not exist" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.681981 4755 scope.go:117] "RemoveContainer" containerID="c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d" Nov 24 01:22:19 crc kubenswrapper[4755]: E1124 01:22:19.682229 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d\": container with ID starting with c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d not found: ID does not exist" 
containerID="c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.682248 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d"} err="failed to get container status \"c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d\": rpc error: code = NotFound desc = could not find container \"c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d\": container with ID starting with c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d not found: ID does not exist" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.682261 4755 scope.go:117] "RemoveContainer" containerID="7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d" Nov 24 01:22:19 crc kubenswrapper[4755]: E1124 01:22:19.682501 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\": container with ID starting with 7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d not found: ID does not exist" containerID="7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.682519 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d"} err="failed to get container status \"7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\": rpc error: code = NotFound desc = could not find container \"7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\": container with ID starting with 7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d not found: ID does not exist" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.682534 4755 scope.go:117] "RemoveContainer" containerID="cca5bd213ea6f69dd96cf34cf1e3fdb22b20ba529527b33302f824ac9ab8d6b4" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.682712 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cca5bd213ea6f69dd96cf34cf1e3fdb22b20ba529527b33302f824ac9ab8d6b4"} err="failed to get container status \"cca5bd213ea6f69dd96cf34cf1e3fdb22b20ba529527b33302f824ac9ab8d6b4\": rpc error: code = NotFound desc = could not find container \"cca5bd213ea6f69dd96cf34cf1e3fdb22b20ba529527b33302f824ac9ab8d6b4\": container with ID starting with cca5bd213ea6f69dd96cf34cf1e3fdb22b20ba529527b33302f824ac9ab8d6b4 not found: ID does not exist" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.682729 4755 scope.go:117] "RemoveContainer" containerID="722b96ad79d500044a30b065a6a5fa100a2388f7c296ae8858a3a83b881a1e0d" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.682893 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"722b96ad79d500044a30b065a6a5fa100a2388f7c296ae8858a3a83b881a1e0d"} err="failed to get container status \"722b96ad79d500044a30b065a6a5fa100a2388f7c296ae8858a3a83b881a1e0d\": rpc error: code = NotFound desc = could not find container \"722b96ad79d500044a30b065a6a5fa100a2388f7c296ae8858a3a83b881a1e0d\": container with ID starting with 722b96ad79d500044a30b065a6a5fa100a2388f7c296ae8858a3a83b881a1e0d not found: ID does not exist" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.682909 4755 scope.go:117] "RemoveContainer" 
containerID="43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.683071 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766"} err="failed to get container status \"43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766\": rpc error: code = NotFound desc = could not find container \"43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766\": container with ID starting with 43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766 not found: ID does not exist" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.683089 4755 scope.go:117] "RemoveContainer" containerID="5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.683253 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad"} err="failed to get container status \"5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad\": rpc error: code = NotFound desc = could not find container \"5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad\": container with ID starting with 5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad not found: ID does not exist" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.683268 4755 scope.go:117] "RemoveContainer" containerID="d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.683529 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662"} err="failed to get container status \"d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662\": rpc error: code = NotFound desc = could not find container \"d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662\": container with ID starting with d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662 not found: ID does not exist" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.683546 4755 scope.go:117] "RemoveContainer" containerID="dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.684508 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403"} err="failed to get container status \"dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403\": rpc error: code = NotFound desc = could not find container \"dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403\": container with ID starting with dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403 not found: ID does not exist" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.684529 4755 scope.go:117] "RemoveContainer" containerID="95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.684748 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db"} err="failed to get container status \"95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db\": rpc error: code = NotFound desc = could not find 
container \"95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db\": container with ID starting with 95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db not found: ID does not exist" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.684766 4755 scope.go:117] "RemoveContainer" containerID="15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.684931 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b"} err="failed to get container status \"15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b\": rpc error: code = NotFound desc = could not find container \"15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b\": container with ID starting with 15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b not found: ID does not exist" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.684948 4755 scope.go:117] "RemoveContainer" containerID="c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.685189 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d"} err="failed to get container status \"c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d\": rpc error: code = NotFound desc = could not find container \"c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d\": container with ID starting with c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d not found: ID does not exist" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.685205 4755 scope.go:117] "RemoveContainer" containerID="7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.685437 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d"} err="failed to get container status \"7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\": rpc error: code = NotFound desc = could not find container \"7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\": container with ID starting with 7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d not found: ID does not exist" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.685452 4755 scope.go:117] "RemoveContainer" containerID="cca5bd213ea6f69dd96cf34cf1e3fdb22b20ba529527b33302f824ac9ab8d6b4" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.685664 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cca5bd213ea6f69dd96cf34cf1e3fdb22b20ba529527b33302f824ac9ab8d6b4"} err="failed to get container status \"cca5bd213ea6f69dd96cf34cf1e3fdb22b20ba529527b33302f824ac9ab8d6b4\": rpc error: code = NotFound desc = could not find container \"cca5bd213ea6f69dd96cf34cf1e3fdb22b20ba529527b33302f824ac9ab8d6b4\": container with ID starting with cca5bd213ea6f69dd96cf34cf1e3fdb22b20ba529527b33302f824ac9ab8d6b4 not found: ID does not exist" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.685680 4755 scope.go:117] "RemoveContainer" containerID="722b96ad79d500044a30b065a6a5fa100a2388f7c296ae8858a3a83b881a1e0d" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.685900 4755 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"722b96ad79d500044a30b065a6a5fa100a2388f7c296ae8858a3a83b881a1e0d"} err="failed to get container status \"722b96ad79d500044a30b065a6a5fa100a2388f7c296ae8858a3a83b881a1e0d\": rpc error: code = NotFound desc = could not find container \"722b96ad79d500044a30b065a6a5fa100a2388f7c296ae8858a3a83b881a1e0d\": container with ID starting with 722b96ad79d500044a30b065a6a5fa100a2388f7c296ae8858a3a83b881a1e0d not found: ID does not exist" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.685917 4755 scope.go:117] "RemoveContainer" containerID="43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.686135 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766"} err="failed to get container status \"43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766\": rpc error: code = NotFound desc = could not find container \"43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766\": container with ID starting with 43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766 not found: ID does not exist" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.686150 4755 scope.go:117] "RemoveContainer" containerID="5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.686327 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad"} err="failed to get container status \"5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad\": rpc error: code = NotFound desc = could not find container \"5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad\": container with ID starting with 5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad not found: ID does not exist" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.686341 4755 scope.go:117] "RemoveContainer" containerID="d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.686551 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662"} err="failed to get container status \"d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662\": rpc error: code = NotFound desc = could not find container \"d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662\": container with ID starting with d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662 not found: ID does not exist" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.686565 4755 scope.go:117] "RemoveContainer" containerID="dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.686779 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403"} err="failed to get container status \"dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403\": rpc error: code = NotFound desc = could not find container \"dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403\": container with ID starting with 
dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403 not found: ID does not exist" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.686794 4755 scope.go:117] "RemoveContainer" containerID="95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.686993 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db"} err="failed to get container status \"95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db\": rpc error: code = NotFound desc = could not find container \"95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db\": container with ID starting with 95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db not found: ID does not exist" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.687009 4755 scope.go:117] "RemoveContainer" containerID="15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.687175 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b"} err="failed to get container status \"15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b\": rpc error: code = NotFound desc = could not find container \"15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b\": container with ID starting with 15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b not found: ID does not exist" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.687190 4755 scope.go:117] "RemoveContainer" containerID="c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.687346 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d"} err="failed to get container status \"c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d\": rpc error: code = NotFound desc = could not find container \"c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d\": container with ID starting with c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d not found: ID does not exist" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.687362 4755 scope.go:117] "RemoveContainer" containerID="7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.687542 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d"} err="failed to get container status \"7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\": rpc error: code = NotFound desc = could not find container \"7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\": container with ID starting with 7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d not found: ID does not exist" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.687559 4755 scope.go:117] "RemoveContainer" containerID="cca5bd213ea6f69dd96cf34cf1e3fdb22b20ba529527b33302f824ac9ab8d6b4" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.687738 4755 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"cca5bd213ea6f69dd96cf34cf1e3fdb22b20ba529527b33302f824ac9ab8d6b4"} err="failed to get container status \"cca5bd213ea6f69dd96cf34cf1e3fdb22b20ba529527b33302f824ac9ab8d6b4\": rpc error: code = NotFound desc = could not find container \"cca5bd213ea6f69dd96cf34cf1e3fdb22b20ba529527b33302f824ac9ab8d6b4\": container with ID starting with cca5bd213ea6f69dd96cf34cf1e3fdb22b20ba529527b33302f824ac9ab8d6b4 not found: ID does not exist" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.687754 4755 scope.go:117] "RemoveContainer" containerID="722b96ad79d500044a30b065a6a5fa100a2388f7c296ae8858a3a83b881a1e0d" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.687916 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"722b96ad79d500044a30b065a6a5fa100a2388f7c296ae8858a3a83b881a1e0d"} err="failed to get container status \"722b96ad79d500044a30b065a6a5fa100a2388f7c296ae8858a3a83b881a1e0d\": rpc error: code = NotFound desc = could not find container \"722b96ad79d500044a30b065a6a5fa100a2388f7c296ae8858a3a83b881a1e0d\": container with ID starting with 722b96ad79d500044a30b065a6a5fa100a2388f7c296ae8858a3a83b881a1e0d not found: ID does not exist" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.687930 4755 scope.go:117] "RemoveContainer" containerID="43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.688086 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766"} err="failed to get container status \"43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766\": rpc error: code = NotFound desc = could not find container \"43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766\": container with ID starting with 43dc4f4f9001f7f3960fee7085a02ab37c3598560ce28cfa0315e22c448e7766 not found: ID does not exist" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.688101 4755 scope.go:117] "RemoveContainer" containerID="5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.688256 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad"} err="failed to get container status \"5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad\": rpc error: code = NotFound desc = could not find container \"5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad\": container with ID starting with 5f02902d8dd9a6cd2181046af5d5a9f80bcdc441f93121982558a3c7c6b4d0ad not found: ID does not exist" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.688270 4755 scope.go:117] "RemoveContainer" containerID="d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.688425 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662"} err="failed to get container status \"d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662\": rpc error: code = NotFound desc = could not find container \"d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662\": container with ID starting with d7727a48ccb229a54c1d29a764535f24c4cdb87b86abdc7ef9febfbad994e662 not found: ID does not exist" Nov 
24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.688439 4755 scope.go:117] "RemoveContainer" containerID="dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.688592 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403"} err="failed to get container status \"dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403\": rpc error: code = NotFound desc = could not find container \"dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403\": container with ID starting with dd1511d0bcad19519854f09add13114dab24846eced876cac2849da8dc023403 not found: ID does not exist" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.688655 4755 scope.go:117] "RemoveContainer" containerID="95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.688835 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db"} err="failed to get container status \"95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db\": rpc error: code = NotFound desc = could not find container \"95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db\": container with ID starting with 95778d535afcdb7dd208d993e02f83908dfac3c257270b672f09c174a7f8b7db not found: ID does not exist" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.688851 4755 scope.go:117] "RemoveContainer" containerID="15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.689011 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b"} err="failed to get container status \"15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b\": rpc error: code = NotFound desc = could not find container \"15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b\": container with ID starting with 15f49d51363c0bfb4741bef831431fadd985e801457aab42584995418d18b21b not found: ID does not exist" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.689031 4755 scope.go:117] "RemoveContainer" containerID="c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.689186 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d"} err="failed to get container status \"c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d\": rpc error: code = NotFound desc = could not find container \"c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d\": container with ID starting with c593cfbb3b4e8ede95c14949ca49c3088fe03e173e7fca73a90dec51519da37d not found: ID does not exist" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.689202 4755 scope.go:117] "RemoveContainer" containerID="7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.689349 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d"} err="failed to get container status 
\"7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\": rpc error: code = NotFound desc = could not find container \"7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d\": container with ID starting with 7e9ee86083f603638f8e18c9dc31423338d2f170df294e3c956a3ef90097631d not found: ID does not exist" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.689364 4755 scope.go:117] "RemoveContainer" containerID="cca5bd213ea6f69dd96cf34cf1e3fdb22b20ba529527b33302f824ac9ab8d6b4" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.689511 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cca5bd213ea6f69dd96cf34cf1e3fdb22b20ba529527b33302f824ac9ab8d6b4"} err="failed to get container status \"cca5bd213ea6f69dd96cf34cf1e3fdb22b20ba529527b33302f824ac9ab8d6b4\": rpc error: code = NotFound desc = could not find container \"cca5bd213ea6f69dd96cf34cf1e3fdb22b20ba529527b33302f824ac9ab8d6b4\": container with ID starting with cca5bd213ea6f69dd96cf34cf1e3fdb22b20ba529527b33302f824ac9ab8d6b4 not found: ID does not exist" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.803745 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.878979 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-4ngwk"] Nov 24 01:22:19 crc kubenswrapper[4755]: I1124 01:22:19.897630 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-4ngwk"] Nov 24 01:22:20 crc kubenswrapper[4755]: I1124 01:22:20.012443 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3b1d3cb-ffbd-4034-832d-6577ccf2f780" path="/var/lib/kubelet/pods/b3b1d3cb-ffbd-4034-832d-6577ccf2f780/volumes" Nov 24 01:22:20 crc kubenswrapper[4755]: I1124 01:22:20.551337 4755 generic.go:334] "Generic (PLEG): container finished" podID="f872bc21-473a-4128-9771-3f721b40d17c" containerID="4a25de28f4aac8bf1ac07f7c83449f8c173d43f942b18443c69081a28bae88ef" exitCode=0 Nov 24 01:22:20 crc kubenswrapper[4755]: I1124 01:22:20.551401 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" event={"ID":"f872bc21-473a-4128-9771-3f721b40d17c","Type":"ContainerDied","Data":"4a25de28f4aac8bf1ac07f7c83449f8c173d43f942b18443c69081a28bae88ef"} Nov 24 01:22:20 crc kubenswrapper[4755]: I1124 01:22:20.551470 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" event={"ID":"f872bc21-473a-4128-9771-3f721b40d17c","Type":"ContainerStarted","Data":"5772832e81ab713035ba2e8d28faa333e72b4c1dfc5215ee5da10ff284fd4c93"} Nov 24 01:22:21 crc kubenswrapper[4755]: I1124 01:22:21.565461 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" event={"ID":"f872bc21-473a-4128-9771-3f721b40d17c","Type":"ContainerStarted","Data":"6c403c7f2ceb6d78ba2d4eeda291c1824039ea77ffc97d1ea45dde06df4a5330"} Nov 24 01:22:21 crc kubenswrapper[4755]: I1124 01:22:21.566725 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" event={"ID":"f872bc21-473a-4128-9771-3f721b40d17c","Type":"ContainerStarted","Data":"1061ff5dbbcb1912da50caa691e51afcc1ad7334f04296be71c43338328750cb"} Nov 24 01:22:21 crc kubenswrapper[4755]: I1124 01:22:21.566771 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" event={"ID":"f872bc21-473a-4128-9771-3f721b40d17c","Type":"ContainerStarted","Data":"b30dfc137784d52f5cdaa66e342a5d0d38cffa2f90aa30629581a9fcc6c08bbc"} Nov 24 01:22:21 crc kubenswrapper[4755]: I1124 01:22:21.566786 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" event={"ID":"f872bc21-473a-4128-9771-3f721b40d17c","Type":"ContainerStarted","Data":"ab17dc9b5c136eefc9f48fc740bc0d1bbc059f9ac0f455a76b6431fe9d45f243"} Nov 24 01:22:21 crc kubenswrapper[4755]: I1124 01:22:21.566798 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" event={"ID":"f872bc21-473a-4128-9771-3f721b40d17c","Type":"ContainerStarted","Data":"f417f61d8651ba989420401ca4233046ef7b8d3b594e66687cbf78defd169a2e"} Nov 24 01:22:21 crc kubenswrapper[4755]: I1124 01:22:21.566811 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" event={"ID":"f872bc21-473a-4128-9771-3f721b40d17c","Type":"ContainerStarted","Data":"d8ffd1b0099af2b11c4fbbe4c60eb171fc2ad8181981f19ae7672791544bb6fd"} Nov 24 01:22:23 crc kubenswrapper[4755]: I1124 01:22:23.583353 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" event={"ID":"f872bc21-473a-4128-9771-3f721b40d17c","Type":"ContainerStarted","Data":"67c0d89daf22195e524bf42ee7d4583cf34c4196fcb65d10ab220934c0cbe505"} Nov 24 01:22:26 crc kubenswrapper[4755]: I1124 01:22:26.614860 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" event={"ID":"f872bc21-473a-4128-9771-3f721b40d17c","Type":"ContainerStarted","Data":"c241500790068b3f9ce784ba6774cfed3515c6f43bdc07bea57e7a51529b6a1d"} Nov 24 01:22:26 crc kubenswrapper[4755]: I1124 01:22:26.615635 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:26 crc kubenswrapper[4755]: I1124 01:22:26.615955 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:26 crc kubenswrapper[4755]: I1124 01:22:26.616034 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:26 crc kubenswrapper[4755]: I1124 01:22:26.646080 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:26 crc kubenswrapper[4755]: I1124 01:22:26.651553 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:26 crc kubenswrapper[4755]: I1124 01:22:26.652541 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" podStartSLOduration=7.652525999 podStartE2EDuration="7.652525999s" podCreationTimestamp="2025-11-24 01:22:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:22:26.6511276 +0000 UTC m=+571.337193151" watchObservedRunningTime="2025-11-24 01:22:26.652525999 +0000 UTC m=+571.338591490" Nov 24 01:22:31 crc kubenswrapper[4755]: I1124 01:22:31.997502 4755 scope.go:117] "RemoveContainer" containerID="7d63755ca911a04529ff56ebf1481bbd429aeb3305588a602d374328fb47890f" Nov 24 01:22:31 crc kubenswrapper[4755]: E1124 01:22:31.998298 4755 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-8pm69_openshift-multus(19dbf7ff-f684-4c57-803a-83b39e0705a4)\"" pod="openshift-multus/multus-8pm69" podUID="19dbf7ff-f684-4c57-803a-83b39e0705a4" Nov 24 01:22:33 crc kubenswrapper[4755]: I1124 01:22:33.294887 4755 patch_prober.go:28] interesting pod/machine-config-daemon-h8xzm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 01:22:33 crc kubenswrapper[4755]: I1124 01:22:33.295207 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 01:22:33 crc kubenswrapper[4755]: I1124 01:22:33.295253 4755 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" Nov 24 01:22:33 crc kubenswrapper[4755]: I1124 01:22:33.295776 4755 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"587d82f2c33616f73d21402a931fc68cfeb5ed9c5e1ee08ba40d1b70c50f1cdd"} pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 24 01:22:33 crc kubenswrapper[4755]: I1124 01:22:33.295828 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" containerID="cri-o://587d82f2c33616f73d21402a931fc68cfeb5ed9c5e1ee08ba40d1b70c50f1cdd" gracePeriod=600 Nov 24 01:22:33 crc kubenswrapper[4755]: I1124 01:22:33.660880 4755 generic.go:334] "Generic (PLEG): container finished" podID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerID="587d82f2c33616f73d21402a931fc68cfeb5ed9c5e1ee08ba40d1b70c50f1cdd" exitCode=0 Nov 24 01:22:33 crc kubenswrapper[4755]: I1124 01:22:33.660944 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" event={"ID":"b1962128-02a0-46c3-82c2-5055c2aed0b9","Type":"ContainerDied","Data":"587d82f2c33616f73d21402a931fc68cfeb5ed9c5e1ee08ba40d1b70c50f1cdd"} Nov 24 01:22:33 crc kubenswrapper[4755]: I1124 01:22:33.661255 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" event={"ID":"b1962128-02a0-46c3-82c2-5055c2aed0b9","Type":"ContainerStarted","Data":"31c763569028cdcbeab7620c7ace03dd90f3c86c98eb54fc2ca5ba33d792fb99"} Nov 24 01:22:33 crc kubenswrapper[4755]: I1124 01:22:33.661284 4755 scope.go:117] "RemoveContainer" containerID="40fb596ee2efd1749e3b689faff00f59f67b4ea23102fca97de18ffa0a4dd608" Nov 24 01:22:43 crc kubenswrapper[4755]: I1124 01:22:42.996852 4755 scope.go:117] "RemoveContainer" containerID="7d63755ca911a04529ff56ebf1481bbd429aeb3305588a602d374328fb47890f" Nov 24 01:22:43 crc kubenswrapper[4755]: I1124 01:22:43.722322 4755 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-multus_multus-8pm69_19dbf7ff-f684-4c57-803a-83b39e0705a4/kube-multus/2.log" Nov 24 01:22:43 crc kubenswrapper[4755]: I1124 01:22:43.723180 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8pm69_19dbf7ff-f684-4c57-803a-83b39e0705a4/kube-multus/1.log" Nov 24 01:22:43 crc kubenswrapper[4755]: I1124 01:22:43.723237 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-8pm69" event={"ID":"19dbf7ff-f684-4c57-803a-83b39e0705a4","Type":"ContainerStarted","Data":"a80f77d38ae40e6b5e9a0990dd59cccbd53c4d389db31530ba4dd0c8103cce3e"} Nov 24 01:22:49 crc kubenswrapper[4755]: I1124 01:22:49.840911 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-kjhj5" Nov 24 01:22:56 crc kubenswrapper[4755]: I1124 01:22:56.215310 4755 scope.go:117] "RemoveContainer" containerID="890eaac30a0fdd773814c7b19a5c7dec1eb0c6be468d72c41fc45382087559a6" Nov 24 01:22:56 crc kubenswrapper[4755]: I1124 01:22:56.235929 4755 scope.go:117] "RemoveContainer" containerID="52945befc58a2caf2afa0272e9cd3d561780193a711a3905d8383c6941d7e8dc" Nov 24 01:22:56 crc kubenswrapper[4755]: I1124 01:22:56.253716 4755 scope.go:117] "RemoveContainer" containerID="edbc9d9876663d11869ff6269682427495c7d205d739e946c354595876274685" Nov 24 01:22:56 crc kubenswrapper[4755]: I1124 01:22:56.810521 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8pm69_19dbf7ff-f684-4c57-803a-83b39e0705a4/kube-multus/2.log" Nov 24 01:23:01 crc kubenswrapper[4755]: I1124 01:23:01.745646 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772er2r7z"] Nov 24 01:23:01 crc kubenswrapper[4755]: I1124 01:23:01.747399 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772er2r7z" Nov 24 01:23:01 crc kubenswrapper[4755]: I1124 01:23:01.751165 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Nov 24 01:23:01 crc kubenswrapper[4755]: I1124 01:23:01.762495 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772er2r7z"] Nov 24 01:23:01 crc kubenswrapper[4755]: I1124 01:23:01.819037 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9fa781fe-e51d-4912-b210-e873945bcbf8-util\") pod \"5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772er2r7z\" (UID: \"9fa781fe-e51d-4912-b210-e873945bcbf8\") " pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772er2r7z" Nov 24 01:23:01 crc kubenswrapper[4755]: I1124 01:23:01.819114 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9fa781fe-e51d-4912-b210-e873945bcbf8-bundle\") pod \"5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772er2r7z\" (UID: \"9fa781fe-e51d-4912-b210-e873945bcbf8\") " pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772er2r7z" Nov 24 01:23:01 crc kubenswrapper[4755]: I1124 01:23:01.819477 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5mnbl\" (UniqueName: \"kubernetes.io/projected/9fa781fe-e51d-4912-b210-e873945bcbf8-kube-api-access-5mnbl\") pod \"5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772er2r7z\" (UID: \"9fa781fe-e51d-4912-b210-e873945bcbf8\") " pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772er2r7z" Nov 24 01:23:01 crc kubenswrapper[4755]: I1124 01:23:01.921183 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9fa781fe-e51d-4912-b210-e873945bcbf8-util\") pod \"5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772er2r7z\" (UID: \"9fa781fe-e51d-4912-b210-e873945bcbf8\") " pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772er2r7z" Nov 24 01:23:01 crc kubenswrapper[4755]: I1124 01:23:01.921256 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9fa781fe-e51d-4912-b210-e873945bcbf8-bundle\") pod \"5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772er2r7z\" (UID: \"9fa781fe-e51d-4912-b210-e873945bcbf8\") " pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772er2r7z" Nov 24 01:23:01 crc kubenswrapper[4755]: I1124 01:23:01.921389 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5mnbl\" (UniqueName: \"kubernetes.io/projected/9fa781fe-e51d-4912-b210-e873945bcbf8-kube-api-access-5mnbl\") pod \"5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772er2r7z\" (UID: \"9fa781fe-e51d-4912-b210-e873945bcbf8\") " pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772er2r7z" Nov 24 01:23:01 crc kubenswrapper[4755]: I1124 01:23:01.922164 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/9fa781fe-e51d-4912-b210-e873945bcbf8-util\") pod \"5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772er2r7z\" (UID: \"9fa781fe-e51d-4912-b210-e873945bcbf8\") " pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772er2r7z" Nov 24 01:23:01 crc kubenswrapper[4755]: I1124 01:23:01.922282 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9fa781fe-e51d-4912-b210-e873945bcbf8-bundle\") pod \"5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772er2r7z\" (UID: \"9fa781fe-e51d-4912-b210-e873945bcbf8\") " pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772er2r7z" Nov 24 01:23:01 crc kubenswrapper[4755]: I1124 01:23:01.960945 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5mnbl\" (UniqueName: \"kubernetes.io/projected/9fa781fe-e51d-4912-b210-e873945bcbf8-kube-api-access-5mnbl\") pod \"5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772er2r7z\" (UID: \"9fa781fe-e51d-4912-b210-e873945bcbf8\") " pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772er2r7z" Nov 24 01:23:02 crc kubenswrapper[4755]: I1124 01:23:02.065434 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772er2r7z" Nov 24 01:23:02 crc kubenswrapper[4755]: I1124 01:23:02.290563 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772er2r7z"] Nov 24 01:23:02 crc kubenswrapper[4755]: I1124 01:23:02.852530 4755 generic.go:334] "Generic (PLEG): container finished" podID="9fa781fe-e51d-4912-b210-e873945bcbf8" containerID="11c8eee0cc2357e5546f46c906265b51a101949919fc78c652db43d9d0585d6f" exitCode=0 Nov 24 01:23:02 crc kubenswrapper[4755]: I1124 01:23:02.852594 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772er2r7z" event={"ID":"9fa781fe-e51d-4912-b210-e873945bcbf8","Type":"ContainerDied","Data":"11c8eee0cc2357e5546f46c906265b51a101949919fc78c652db43d9d0585d6f"} Nov 24 01:23:02 crc kubenswrapper[4755]: I1124 01:23:02.852661 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772er2r7z" event={"ID":"9fa781fe-e51d-4912-b210-e873945bcbf8","Type":"ContainerStarted","Data":"edd689f9525030641296111d96e59aa2c942ab548dd37258178da52d95ab2fc1"} Nov 24 01:23:04 crc kubenswrapper[4755]: I1124 01:23:04.870074 4755 generic.go:334] "Generic (PLEG): container finished" podID="9fa781fe-e51d-4912-b210-e873945bcbf8" containerID="4b76a4da126c3fed90e301ba6c73648d83b0a692c4b9567630c493557969b30c" exitCode=0 Nov 24 01:23:04 crc kubenswrapper[4755]: I1124 01:23:04.870145 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772er2r7z" event={"ID":"9fa781fe-e51d-4912-b210-e873945bcbf8","Type":"ContainerDied","Data":"4b76a4da126c3fed90e301ba6c73648d83b0a692c4b9567630c493557969b30c"} Nov 24 01:23:05 crc kubenswrapper[4755]: I1124 01:23:05.879427 4755 generic.go:334] "Generic (PLEG): container finished" podID="9fa781fe-e51d-4912-b210-e873945bcbf8" containerID="53a1967c164c9f85991d2fe9d1f4f263a18aa935f65319c53faa801d08696d89" exitCode=0 Nov 24 01:23:05 crc kubenswrapper[4755]: I1124 
01:23:05.879543 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772er2r7z" event={"ID":"9fa781fe-e51d-4912-b210-e873945bcbf8","Type":"ContainerDied","Data":"53a1967c164c9f85991d2fe9d1f4f263a18aa935f65319c53faa801d08696d89"} Nov 24 01:23:07 crc kubenswrapper[4755]: I1124 01:23:07.229982 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772er2r7z" Nov 24 01:23:07 crc kubenswrapper[4755]: I1124 01:23:07.299670 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5mnbl\" (UniqueName: \"kubernetes.io/projected/9fa781fe-e51d-4912-b210-e873945bcbf8-kube-api-access-5mnbl\") pod \"9fa781fe-e51d-4912-b210-e873945bcbf8\" (UID: \"9fa781fe-e51d-4912-b210-e873945bcbf8\") " Nov 24 01:23:07 crc kubenswrapper[4755]: I1124 01:23:07.299748 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9fa781fe-e51d-4912-b210-e873945bcbf8-bundle\") pod \"9fa781fe-e51d-4912-b210-e873945bcbf8\" (UID: \"9fa781fe-e51d-4912-b210-e873945bcbf8\") " Nov 24 01:23:07 crc kubenswrapper[4755]: I1124 01:23:07.299827 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9fa781fe-e51d-4912-b210-e873945bcbf8-util\") pod \"9fa781fe-e51d-4912-b210-e873945bcbf8\" (UID: \"9fa781fe-e51d-4912-b210-e873945bcbf8\") " Nov 24 01:23:07 crc kubenswrapper[4755]: I1124 01:23:07.300684 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9fa781fe-e51d-4912-b210-e873945bcbf8-bundle" (OuterVolumeSpecName: "bundle") pod "9fa781fe-e51d-4912-b210-e873945bcbf8" (UID: "9fa781fe-e51d-4912-b210-e873945bcbf8"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:23:07 crc kubenswrapper[4755]: I1124 01:23:07.315798 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9fa781fe-e51d-4912-b210-e873945bcbf8-util" (OuterVolumeSpecName: "util") pod "9fa781fe-e51d-4912-b210-e873945bcbf8" (UID: "9fa781fe-e51d-4912-b210-e873945bcbf8"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:23:07 crc kubenswrapper[4755]: I1124 01:23:07.319565 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9fa781fe-e51d-4912-b210-e873945bcbf8-kube-api-access-5mnbl" (OuterVolumeSpecName: "kube-api-access-5mnbl") pod "9fa781fe-e51d-4912-b210-e873945bcbf8" (UID: "9fa781fe-e51d-4912-b210-e873945bcbf8"). InnerVolumeSpecName "kube-api-access-5mnbl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:23:07 crc kubenswrapper[4755]: I1124 01:23:07.401506 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5mnbl\" (UniqueName: \"kubernetes.io/projected/9fa781fe-e51d-4912-b210-e873945bcbf8-kube-api-access-5mnbl\") on node \"crc\" DevicePath \"\"" Nov 24 01:23:07 crc kubenswrapper[4755]: I1124 01:23:07.401558 4755 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9fa781fe-e51d-4912-b210-e873945bcbf8-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:23:07 crc kubenswrapper[4755]: I1124 01:23:07.401579 4755 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9fa781fe-e51d-4912-b210-e873945bcbf8-util\") on node \"crc\" DevicePath \"\"" Nov 24 01:23:07 crc kubenswrapper[4755]: I1124 01:23:07.916161 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772er2r7z" event={"ID":"9fa781fe-e51d-4912-b210-e873945bcbf8","Type":"ContainerDied","Data":"edd689f9525030641296111d96e59aa2c942ab548dd37258178da52d95ab2fc1"} Nov 24 01:23:07 crc kubenswrapper[4755]: I1124 01:23:07.916223 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="edd689f9525030641296111d96e59aa2c942ab548dd37258178da52d95ab2fc1" Nov 24 01:23:07 crc kubenswrapper[4755]: I1124 01:23:07.916310 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772er2r7z" Nov 24 01:23:09 crc kubenswrapper[4755]: I1124 01:23:09.450855 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-557fdffb88-qmm9f"] Nov 24 01:23:09 crc kubenswrapper[4755]: E1124 01:23:09.451397 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fa781fe-e51d-4912-b210-e873945bcbf8" containerName="util" Nov 24 01:23:09 crc kubenswrapper[4755]: I1124 01:23:09.451411 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fa781fe-e51d-4912-b210-e873945bcbf8" containerName="util" Nov 24 01:23:09 crc kubenswrapper[4755]: E1124 01:23:09.451425 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fa781fe-e51d-4912-b210-e873945bcbf8" containerName="pull" Nov 24 01:23:09 crc kubenswrapper[4755]: I1124 01:23:09.451433 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fa781fe-e51d-4912-b210-e873945bcbf8" containerName="pull" Nov 24 01:23:09 crc kubenswrapper[4755]: E1124 01:23:09.451456 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fa781fe-e51d-4912-b210-e873945bcbf8" containerName="extract" Nov 24 01:23:09 crc kubenswrapper[4755]: I1124 01:23:09.451464 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fa781fe-e51d-4912-b210-e873945bcbf8" containerName="extract" Nov 24 01:23:09 crc kubenswrapper[4755]: I1124 01:23:09.451584 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="9fa781fe-e51d-4912-b210-e873945bcbf8" containerName="extract" Nov 24 01:23:09 crc kubenswrapper[4755]: I1124 01:23:09.452033 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-557fdffb88-qmm9f" Nov 24 01:23:09 crc kubenswrapper[4755]: I1124 01:23:09.453674 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Nov 24 01:23:09 crc kubenswrapper[4755]: I1124 01:23:09.453845 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Nov 24 01:23:09 crc kubenswrapper[4755]: I1124 01:23:09.456563 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-ndz7r" Nov 24 01:23:09 crc kubenswrapper[4755]: I1124 01:23:09.465320 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-557fdffb88-qmm9f"] Nov 24 01:23:09 crc kubenswrapper[4755]: I1124 01:23:09.526227 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hvxzn\" (UniqueName: \"kubernetes.io/projected/0594f79b-cc74-4be7-a0c0-605666ea9f19-kube-api-access-hvxzn\") pod \"nmstate-operator-557fdffb88-qmm9f\" (UID: \"0594f79b-cc74-4be7-a0c0-605666ea9f19\") " pod="openshift-nmstate/nmstate-operator-557fdffb88-qmm9f" Nov 24 01:23:09 crc kubenswrapper[4755]: I1124 01:23:09.627402 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hvxzn\" (UniqueName: \"kubernetes.io/projected/0594f79b-cc74-4be7-a0c0-605666ea9f19-kube-api-access-hvxzn\") pod \"nmstate-operator-557fdffb88-qmm9f\" (UID: \"0594f79b-cc74-4be7-a0c0-605666ea9f19\") " pod="openshift-nmstate/nmstate-operator-557fdffb88-qmm9f" Nov 24 01:23:09 crc kubenswrapper[4755]: I1124 01:23:09.649398 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hvxzn\" (UniqueName: \"kubernetes.io/projected/0594f79b-cc74-4be7-a0c0-605666ea9f19-kube-api-access-hvxzn\") pod \"nmstate-operator-557fdffb88-qmm9f\" (UID: \"0594f79b-cc74-4be7-a0c0-605666ea9f19\") " pod="openshift-nmstate/nmstate-operator-557fdffb88-qmm9f" Nov 24 01:23:09 crc kubenswrapper[4755]: I1124 01:23:09.786781 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-557fdffb88-qmm9f" Nov 24 01:23:10 crc kubenswrapper[4755]: I1124 01:23:10.013647 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-557fdffb88-qmm9f"] Nov 24 01:23:10 crc kubenswrapper[4755]: I1124 01:23:10.943436 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-557fdffb88-qmm9f" event={"ID":"0594f79b-cc74-4be7-a0c0-605666ea9f19","Type":"ContainerStarted","Data":"ac4b270a6619ce3f0e18d0f72ee38a32b94cec85cf0dabc57e7f3f6a0547827d"} Nov 24 01:23:12 crc kubenswrapper[4755]: I1124 01:23:12.958682 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-557fdffb88-qmm9f" event={"ID":"0594f79b-cc74-4be7-a0c0-605666ea9f19","Type":"ContainerStarted","Data":"9ceca017915bbbeb2f3fa4d54f795adcb4089f4077b72a2345b4a0b83c70e1f2"} Nov 24 01:23:13 crc kubenswrapper[4755]: I1124 01:23:13.896276 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-557fdffb88-qmm9f" podStartSLOduration=2.945238116 podStartE2EDuration="4.896254818s" podCreationTimestamp="2025-11-24 01:23:09 +0000 UTC" firstStartedPulling="2025-11-24 01:23:10.023485516 +0000 UTC m=+614.709551017" lastFinishedPulling="2025-11-24 01:23:11.974502218 +0000 UTC m=+616.660567719" observedRunningTime="2025-11-24 01:23:12.983184746 +0000 UTC m=+617.669250297" watchObservedRunningTime="2025-11-24 01:23:13.896254818 +0000 UTC m=+618.582320319" Nov 24 01:23:13 crc kubenswrapper[4755]: I1124 01:23:13.899321 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-5dcf9c57c5-db5zr"] Nov 24 01:23:13 crc kubenswrapper[4755]: I1124 01:23:13.900371 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-5dcf9c57c5-db5zr" Nov 24 01:23:13 crc kubenswrapper[4755]: I1124 01:23:13.902767 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-zr5zw" Nov 24 01:23:13 crc kubenswrapper[4755]: I1124 01:23:13.924404 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-6b89b748d8-6dn5d"] Nov 24 01:23:13 crc kubenswrapper[4755]: I1124 01:23:13.925098 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6b89b748d8-6dn5d" Nov 24 01:23:13 crc kubenswrapper[4755]: I1124 01:23:13.928456 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-5dcf9c57c5-db5zr"] Nov 24 01:23:13 crc kubenswrapper[4755]: I1124 01:23:13.938973 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6b89b748d8-6dn5d"] Nov 24 01:23:13 crc kubenswrapper[4755]: I1124 01:23:13.940026 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Nov 24 01:23:13 crc kubenswrapper[4755]: I1124 01:23:13.955029 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-7hlnk"] Nov 24 01:23:13 crc kubenswrapper[4755]: I1124 01:23:13.956966 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-7hlnk" Nov 24 01:23:13 crc kubenswrapper[4755]: I1124 01:23:13.983808 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/3e04477a-f03b-4cd7-ba29-1622ea087da5-ovs-socket\") pod \"nmstate-handler-7hlnk\" (UID: \"3e04477a-f03b-4cd7-ba29-1622ea087da5\") " pod="openshift-nmstate/nmstate-handler-7hlnk" Nov 24 01:23:13 crc kubenswrapper[4755]: I1124 01:23:13.984124 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/d2fd6ef2-fcd0-4169-a26a-4f30b0619efa-tls-key-pair\") pod \"nmstate-webhook-6b89b748d8-6dn5d\" (UID: \"d2fd6ef2-fcd0-4169-a26a-4f30b0619efa\") " pod="openshift-nmstate/nmstate-webhook-6b89b748d8-6dn5d" Nov 24 01:23:13 crc kubenswrapper[4755]: I1124 01:23:13.984157 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/3e04477a-f03b-4cd7-ba29-1622ea087da5-dbus-socket\") pod \"nmstate-handler-7hlnk\" (UID: \"3e04477a-f03b-4cd7-ba29-1622ea087da5\") " pod="openshift-nmstate/nmstate-handler-7hlnk" Nov 24 01:23:13 crc kubenswrapper[4755]: I1124 01:23:13.984174 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/3e04477a-f03b-4cd7-ba29-1622ea087da5-nmstate-lock\") pod \"nmstate-handler-7hlnk\" (UID: \"3e04477a-f03b-4cd7-ba29-1622ea087da5\") " pod="openshift-nmstate/nmstate-handler-7hlnk" Nov 24 01:23:13 crc kubenswrapper[4755]: I1124 01:23:13.984224 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rgbjt\" (UniqueName: \"kubernetes.io/projected/3e04477a-f03b-4cd7-ba29-1622ea087da5-kube-api-access-rgbjt\") pod \"nmstate-handler-7hlnk\" (UID: \"3e04477a-f03b-4cd7-ba29-1622ea087da5\") " pod="openshift-nmstate/nmstate-handler-7hlnk" Nov 24 01:23:13 crc kubenswrapper[4755]: I1124 01:23:13.984407 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sctfd\" (UniqueName: \"kubernetes.io/projected/e48ce9d8-52d8-4ab4-9f9b-b4ea43d68277-kube-api-access-sctfd\") pod \"nmstate-metrics-5dcf9c57c5-db5zr\" (UID: \"e48ce9d8-52d8-4ab4-9f9b-b4ea43d68277\") " pod="openshift-nmstate/nmstate-metrics-5dcf9c57c5-db5zr" Nov 24 01:23:13 crc kubenswrapper[4755]: I1124 01:23:13.984465 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tsl8q\" (UniqueName: \"kubernetes.io/projected/d2fd6ef2-fcd0-4169-a26a-4f30b0619efa-kube-api-access-tsl8q\") pod \"nmstate-webhook-6b89b748d8-6dn5d\" (UID: \"d2fd6ef2-fcd0-4169-a26a-4f30b0619efa\") " pod="openshift-nmstate/nmstate-webhook-6b89b748d8-6dn5d" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.034320 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-5874bd7bc5-bjf4f"] Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.035139 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-bjf4f" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.036916 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.037014 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-l6t6r" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.037122 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.044515 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-5874bd7bc5-bjf4f"] Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.085273 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/b0fbafa4-291f-4eee-8133-30e7a85ff7ff-nginx-conf\") pod \"nmstate-console-plugin-5874bd7bc5-bjf4f\" (UID: \"b0fbafa4-291f-4eee-8133-30e7a85ff7ff\") " pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-bjf4f" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.085327 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/b0fbafa4-291f-4eee-8133-30e7a85ff7ff-plugin-serving-cert\") pod \"nmstate-console-plugin-5874bd7bc5-bjf4f\" (UID: \"b0fbafa4-291f-4eee-8133-30e7a85ff7ff\") " pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-bjf4f" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.085362 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/3e04477a-f03b-4cd7-ba29-1622ea087da5-ovs-socket\") pod \"nmstate-handler-7hlnk\" (UID: \"3e04477a-f03b-4cd7-ba29-1622ea087da5\") " pod="openshift-nmstate/nmstate-handler-7hlnk" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.085384 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/d2fd6ef2-fcd0-4169-a26a-4f30b0619efa-tls-key-pair\") pod \"nmstate-webhook-6b89b748d8-6dn5d\" (UID: \"d2fd6ef2-fcd0-4169-a26a-4f30b0619efa\") " pod="openshift-nmstate/nmstate-webhook-6b89b748d8-6dn5d" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.085420 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/3e04477a-f03b-4cd7-ba29-1622ea087da5-dbus-socket\") pod \"nmstate-handler-7hlnk\" (UID: \"3e04477a-f03b-4cd7-ba29-1622ea087da5\") " pod="openshift-nmstate/nmstate-handler-7hlnk" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.085437 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/3e04477a-f03b-4cd7-ba29-1622ea087da5-nmstate-lock\") pod \"nmstate-handler-7hlnk\" (UID: \"3e04477a-f03b-4cd7-ba29-1622ea087da5\") " pod="openshift-nmstate/nmstate-handler-7hlnk" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.085468 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rgbjt\" (UniqueName: \"kubernetes.io/projected/3e04477a-f03b-4cd7-ba29-1622ea087da5-kube-api-access-rgbjt\") pod \"nmstate-handler-7hlnk\" (UID: \"3e04477a-f03b-4cd7-ba29-1622ea087da5\") " 
pod="openshift-nmstate/nmstate-handler-7hlnk" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.085521 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sctfd\" (UniqueName: \"kubernetes.io/projected/e48ce9d8-52d8-4ab4-9f9b-b4ea43d68277-kube-api-access-sctfd\") pod \"nmstate-metrics-5dcf9c57c5-db5zr\" (UID: \"e48ce9d8-52d8-4ab4-9f9b-b4ea43d68277\") " pod="openshift-nmstate/nmstate-metrics-5dcf9c57c5-db5zr" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.085550 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q6bsz\" (UniqueName: \"kubernetes.io/projected/b0fbafa4-291f-4eee-8133-30e7a85ff7ff-kube-api-access-q6bsz\") pod \"nmstate-console-plugin-5874bd7bc5-bjf4f\" (UID: \"b0fbafa4-291f-4eee-8133-30e7a85ff7ff\") " pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-bjf4f" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.085578 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tsl8q\" (UniqueName: \"kubernetes.io/projected/d2fd6ef2-fcd0-4169-a26a-4f30b0619efa-kube-api-access-tsl8q\") pod \"nmstate-webhook-6b89b748d8-6dn5d\" (UID: \"d2fd6ef2-fcd0-4169-a26a-4f30b0619efa\") " pod="openshift-nmstate/nmstate-webhook-6b89b748d8-6dn5d" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.085988 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/3e04477a-f03b-4cd7-ba29-1622ea087da5-ovs-socket\") pod \"nmstate-handler-7hlnk\" (UID: \"3e04477a-f03b-4cd7-ba29-1622ea087da5\") " pod="openshift-nmstate/nmstate-handler-7hlnk" Nov 24 01:23:14 crc kubenswrapper[4755]: E1124 01:23:14.086041 4755 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Nov 24 01:23:14 crc kubenswrapper[4755]: E1124 01:23:14.086109 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d2fd6ef2-fcd0-4169-a26a-4f30b0619efa-tls-key-pair podName:d2fd6ef2-fcd0-4169-a26a-4f30b0619efa nodeName:}" failed. No retries permitted until 2025-11-24 01:23:14.586092765 +0000 UTC m=+619.272158266 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/d2fd6ef2-fcd0-4169-a26a-4f30b0619efa-tls-key-pair") pod "nmstate-webhook-6b89b748d8-6dn5d" (UID: "d2fd6ef2-fcd0-4169-a26a-4f30b0619efa") : secret "openshift-nmstate-webhook" not found Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.086134 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/3e04477a-f03b-4cd7-ba29-1622ea087da5-dbus-socket\") pod \"nmstate-handler-7hlnk\" (UID: \"3e04477a-f03b-4cd7-ba29-1622ea087da5\") " pod="openshift-nmstate/nmstate-handler-7hlnk" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.086043 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/3e04477a-f03b-4cd7-ba29-1622ea087da5-nmstate-lock\") pod \"nmstate-handler-7hlnk\" (UID: \"3e04477a-f03b-4cd7-ba29-1622ea087da5\") " pod="openshift-nmstate/nmstate-handler-7hlnk" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.103377 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tsl8q\" (UniqueName: \"kubernetes.io/projected/d2fd6ef2-fcd0-4169-a26a-4f30b0619efa-kube-api-access-tsl8q\") pod \"nmstate-webhook-6b89b748d8-6dn5d\" (UID: \"d2fd6ef2-fcd0-4169-a26a-4f30b0619efa\") " pod="openshift-nmstate/nmstate-webhook-6b89b748d8-6dn5d" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.103439 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sctfd\" (UniqueName: \"kubernetes.io/projected/e48ce9d8-52d8-4ab4-9f9b-b4ea43d68277-kube-api-access-sctfd\") pod \"nmstate-metrics-5dcf9c57c5-db5zr\" (UID: \"e48ce9d8-52d8-4ab4-9f9b-b4ea43d68277\") " pod="openshift-nmstate/nmstate-metrics-5dcf9c57c5-db5zr" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.104144 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rgbjt\" (UniqueName: \"kubernetes.io/projected/3e04477a-f03b-4cd7-ba29-1622ea087da5-kube-api-access-rgbjt\") pod \"nmstate-handler-7hlnk\" (UID: \"3e04477a-f03b-4cd7-ba29-1622ea087da5\") " pod="openshift-nmstate/nmstate-handler-7hlnk" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.186948 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/b0fbafa4-291f-4eee-8133-30e7a85ff7ff-plugin-serving-cert\") pod \"nmstate-console-plugin-5874bd7bc5-bjf4f\" (UID: \"b0fbafa4-291f-4eee-8133-30e7a85ff7ff\") " pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-bjf4f" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.187050 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q6bsz\" (UniqueName: \"kubernetes.io/projected/b0fbafa4-291f-4eee-8133-30e7a85ff7ff-kube-api-access-q6bsz\") pod \"nmstate-console-plugin-5874bd7bc5-bjf4f\" (UID: \"b0fbafa4-291f-4eee-8133-30e7a85ff7ff\") " pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-bjf4f" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.187080 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/b0fbafa4-291f-4eee-8133-30e7a85ff7ff-nginx-conf\") pod \"nmstate-console-plugin-5874bd7bc5-bjf4f\" (UID: \"b0fbafa4-291f-4eee-8133-30e7a85ff7ff\") " pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-bjf4f" Nov 24 01:23:14 crc kubenswrapper[4755]: E1124 
01:23:14.187148 4755 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found Nov 24 01:23:14 crc kubenswrapper[4755]: E1124 01:23:14.187237 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b0fbafa4-291f-4eee-8133-30e7a85ff7ff-plugin-serving-cert podName:b0fbafa4-291f-4eee-8133-30e7a85ff7ff nodeName:}" failed. No retries permitted until 2025-11-24 01:23:14.687208437 +0000 UTC m=+619.373273938 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/b0fbafa4-291f-4eee-8133-30e7a85ff7ff-plugin-serving-cert") pod "nmstate-console-plugin-5874bd7bc5-bjf4f" (UID: "b0fbafa4-291f-4eee-8133-30e7a85ff7ff") : secret "plugin-serving-cert" not found Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.187838 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/b0fbafa4-291f-4eee-8133-30e7a85ff7ff-nginx-conf\") pod \"nmstate-console-plugin-5874bd7bc5-bjf4f\" (UID: \"b0fbafa4-291f-4eee-8133-30e7a85ff7ff\") " pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-bjf4f" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.207082 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-986587678-jlq69"] Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.207922 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-986587678-jlq69" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.212830 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q6bsz\" (UniqueName: \"kubernetes.io/projected/b0fbafa4-291f-4eee-8133-30e7a85ff7ff-kube-api-access-q6bsz\") pod \"nmstate-console-plugin-5874bd7bc5-bjf4f\" (UID: \"b0fbafa4-291f-4eee-8133-30e7a85ff7ff\") " pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-bjf4f" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.218169 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-986587678-jlq69"] Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.227750 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-5dcf9c57c5-db5zr" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.287258 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-7hlnk" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.287531 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/2c2ee6db-96d6-4e2d-af4c-6c597cf3c6b4-console-config\") pod \"console-986587678-jlq69\" (UID: \"2c2ee6db-96d6-4e2d-af4c-6c597cf3c6b4\") " pod="openshift-console/console-986587678-jlq69" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.287566 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2c2ee6db-96d6-4e2d-af4c-6c597cf3c6b4-trusted-ca-bundle\") pod \"console-986587678-jlq69\" (UID: \"2c2ee6db-96d6-4e2d-af4c-6c597cf3c6b4\") " pod="openshift-console/console-986587678-jlq69" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.287615 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2c2ee6db-96d6-4e2d-af4c-6c597cf3c6b4-service-ca\") pod \"console-986587678-jlq69\" (UID: \"2c2ee6db-96d6-4e2d-af4c-6c597cf3c6b4\") " pod="openshift-console/console-986587678-jlq69" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.287725 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/2c2ee6db-96d6-4e2d-af4c-6c597cf3c6b4-console-serving-cert\") pod \"console-986587678-jlq69\" (UID: \"2c2ee6db-96d6-4e2d-af4c-6c597cf3c6b4\") " pod="openshift-console/console-986587678-jlq69" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.287829 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bxq2g\" (UniqueName: \"kubernetes.io/projected/2c2ee6db-96d6-4e2d-af4c-6c597cf3c6b4-kube-api-access-bxq2g\") pod \"console-986587678-jlq69\" (UID: \"2c2ee6db-96d6-4e2d-af4c-6c597cf3c6b4\") " pod="openshift-console/console-986587678-jlq69" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.288110 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2c2ee6db-96d6-4e2d-af4c-6c597cf3c6b4-oauth-serving-cert\") pod \"console-986587678-jlq69\" (UID: \"2c2ee6db-96d6-4e2d-af4c-6c597cf3c6b4\") " pod="openshift-console/console-986587678-jlq69" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.288152 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2c2ee6db-96d6-4e2d-af4c-6c597cf3c6b4-console-oauth-config\") pod \"console-986587678-jlq69\" (UID: \"2c2ee6db-96d6-4e2d-af4c-6c597cf3c6b4\") " pod="openshift-console/console-986587678-jlq69" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.389218 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2c2ee6db-96d6-4e2d-af4c-6c597cf3c6b4-service-ca\") pod \"console-986587678-jlq69\" (UID: \"2c2ee6db-96d6-4e2d-af4c-6c597cf3c6b4\") " pod="openshift-console/console-986587678-jlq69" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.389566 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/2c2ee6db-96d6-4e2d-af4c-6c597cf3c6b4-console-serving-cert\") pod \"console-986587678-jlq69\" (UID: \"2c2ee6db-96d6-4e2d-af4c-6c597cf3c6b4\") " pod="openshift-console/console-986587678-jlq69" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.389637 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bxq2g\" (UniqueName: \"kubernetes.io/projected/2c2ee6db-96d6-4e2d-af4c-6c597cf3c6b4-kube-api-access-bxq2g\") pod \"console-986587678-jlq69\" (UID: \"2c2ee6db-96d6-4e2d-af4c-6c597cf3c6b4\") " pod="openshift-console/console-986587678-jlq69" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.389683 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2c2ee6db-96d6-4e2d-af4c-6c597cf3c6b4-oauth-serving-cert\") pod \"console-986587678-jlq69\" (UID: \"2c2ee6db-96d6-4e2d-af4c-6c597cf3c6b4\") " pod="openshift-console/console-986587678-jlq69" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.389709 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2c2ee6db-96d6-4e2d-af4c-6c597cf3c6b4-console-oauth-config\") pod \"console-986587678-jlq69\" (UID: \"2c2ee6db-96d6-4e2d-af4c-6c597cf3c6b4\") " pod="openshift-console/console-986587678-jlq69" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.389738 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/2c2ee6db-96d6-4e2d-af4c-6c597cf3c6b4-console-config\") pod \"console-986587678-jlq69\" (UID: \"2c2ee6db-96d6-4e2d-af4c-6c597cf3c6b4\") " pod="openshift-console/console-986587678-jlq69" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.389758 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2c2ee6db-96d6-4e2d-af4c-6c597cf3c6b4-trusted-ca-bundle\") pod \"console-986587678-jlq69\" (UID: \"2c2ee6db-96d6-4e2d-af4c-6c597cf3c6b4\") " pod="openshift-console/console-986587678-jlq69" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.390279 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2c2ee6db-96d6-4e2d-af4c-6c597cf3c6b4-service-ca\") pod \"console-986587678-jlq69\" (UID: \"2c2ee6db-96d6-4e2d-af4c-6c597cf3c6b4\") " pod="openshift-console/console-986587678-jlq69" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.391072 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2c2ee6db-96d6-4e2d-af4c-6c597cf3c6b4-oauth-serving-cert\") pod \"console-986587678-jlq69\" (UID: \"2c2ee6db-96d6-4e2d-af4c-6c597cf3c6b4\") " pod="openshift-console/console-986587678-jlq69" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.391213 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/2c2ee6db-96d6-4e2d-af4c-6c597cf3c6b4-console-config\") pod \"console-986587678-jlq69\" (UID: \"2c2ee6db-96d6-4e2d-af4c-6c597cf3c6b4\") " pod="openshift-console/console-986587678-jlq69" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.392068 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/2c2ee6db-96d6-4e2d-af4c-6c597cf3c6b4-trusted-ca-bundle\") pod \"console-986587678-jlq69\" (UID: \"2c2ee6db-96d6-4e2d-af4c-6c597cf3c6b4\") " pod="openshift-console/console-986587678-jlq69" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.395944 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2c2ee6db-96d6-4e2d-af4c-6c597cf3c6b4-console-oauth-config\") pod \"console-986587678-jlq69\" (UID: \"2c2ee6db-96d6-4e2d-af4c-6c597cf3c6b4\") " pod="openshift-console/console-986587678-jlq69" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.397777 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/2c2ee6db-96d6-4e2d-af4c-6c597cf3c6b4-console-serving-cert\") pod \"console-986587678-jlq69\" (UID: \"2c2ee6db-96d6-4e2d-af4c-6c597cf3c6b4\") " pod="openshift-console/console-986587678-jlq69" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.408086 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bxq2g\" (UniqueName: \"kubernetes.io/projected/2c2ee6db-96d6-4e2d-af4c-6c597cf3c6b4-kube-api-access-bxq2g\") pod \"console-986587678-jlq69\" (UID: \"2c2ee6db-96d6-4e2d-af4c-6c597cf3c6b4\") " pod="openshift-console/console-986587678-jlq69" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.440702 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-5dcf9c57c5-db5zr"] Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.594142 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/d2fd6ef2-fcd0-4169-a26a-4f30b0619efa-tls-key-pair\") pod \"nmstate-webhook-6b89b748d8-6dn5d\" (UID: \"d2fd6ef2-fcd0-4169-a26a-4f30b0619efa\") " pod="openshift-nmstate/nmstate-webhook-6b89b748d8-6dn5d" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.597170 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/d2fd6ef2-fcd0-4169-a26a-4f30b0619efa-tls-key-pair\") pod \"nmstate-webhook-6b89b748d8-6dn5d\" (UID: \"d2fd6ef2-fcd0-4169-a26a-4f30b0619efa\") " pod="openshift-nmstate/nmstate-webhook-6b89b748d8-6dn5d" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.599146 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-986587678-jlq69" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.695414 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/b0fbafa4-291f-4eee-8133-30e7a85ff7ff-plugin-serving-cert\") pod \"nmstate-console-plugin-5874bd7bc5-bjf4f\" (UID: \"b0fbafa4-291f-4eee-8133-30e7a85ff7ff\") " pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-bjf4f" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.699307 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/b0fbafa4-291f-4eee-8133-30e7a85ff7ff-plugin-serving-cert\") pod \"nmstate-console-plugin-5874bd7bc5-bjf4f\" (UID: \"b0fbafa4-291f-4eee-8133-30e7a85ff7ff\") " pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-bjf4f" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.783041 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-986587678-jlq69"] Nov 24 01:23:14 crc kubenswrapper[4755]: W1124 01:23:14.789014 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2c2ee6db_96d6_4e2d_af4c_6c597cf3c6b4.slice/crio-17de6643f1336ac02cdd71e48a4131992972dff3489c858db31f6bef52ad5c06 WatchSource:0}: Error finding container 17de6643f1336ac02cdd71e48a4131992972dff3489c858db31f6bef52ad5c06: Status 404 returned error can't find the container with id 17de6643f1336ac02cdd71e48a4131992972dff3489c858db31f6bef52ad5c06 Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.855133 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6b89b748d8-6dn5d" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.949312 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-bjf4f" Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.972887 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-5dcf9c57c5-db5zr" event={"ID":"e48ce9d8-52d8-4ab4-9f9b-b4ea43d68277","Type":"ContainerStarted","Data":"507b0cd3da9a91521ff2b89c6c35b130eef0432074d93497c51e1381aed72c49"} Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.973930 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-7hlnk" event={"ID":"3e04477a-f03b-4cd7-ba29-1622ea087da5","Type":"ContainerStarted","Data":"817560d3f5b01db20dab5208d4d06e7772fd7ac7dbd4e899d1a87ef41601f454"} Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.975995 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-986587678-jlq69" event={"ID":"2c2ee6db-96d6-4e2d-af4c-6c597cf3c6b4","Type":"ContainerStarted","Data":"23e45c5f84252b647e810ce181070eac880f66a9f6707dad9cd566cd1f646ca3"} Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.976025 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-986587678-jlq69" event={"ID":"2c2ee6db-96d6-4e2d-af4c-6c597cf3c6b4","Type":"ContainerStarted","Data":"17de6643f1336ac02cdd71e48a4131992972dff3489c858db31f6bef52ad5c06"} Nov 24 01:23:14 crc kubenswrapper[4755]: I1124 01:23:14.995977 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-986587678-jlq69" podStartSLOduration=0.995955507 podStartE2EDuration="995.955507ms" podCreationTimestamp="2025-11-24 01:23:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:23:14.991349008 +0000 UTC m=+619.677414529" watchObservedRunningTime="2025-11-24 01:23:14.995955507 +0000 UTC m=+619.682021018" Nov 24 01:23:15 crc kubenswrapper[4755]: I1124 01:23:15.139456 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-5874bd7bc5-bjf4f"] Nov 24 01:23:15 crc kubenswrapper[4755]: I1124 01:23:15.231244 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6b89b748d8-6dn5d"] Nov 24 01:23:15 crc kubenswrapper[4755]: W1124 01:23:15.235632 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd2fd6ef2_fcd0_4169_a26a_4f30b0619efa.slice/crio-2538e05f9cb07b42d0765fd82155987e8886583c2e01878b3238909b9c03dec5 WatchSource:0}: Error finding container 2538e05f9cb07b42d0765fd82155987e8886583c2e01878b3238909b9c03dec5: Status 404 returned error can't find the container with id 2538e05f9cb07b42d0765fd82155987e8886583c2e01878b3238909b9c03dec5 Nov 24 01:23:15 crc kubenswrapper[4755]: I1124 01:23:15.984397 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-bjf4f" event={"ID":"b0fbafa4-291f-4eee-8133-30e7a85ff7ff","Type":"ContainerStarted","Data":"7de04416158bb89d8e5936dd0f9433128fbe0969a4850ea146f91090ef03e3d9"} Nov 24 01:23:15 crc kubenswrapper[4755]: I1124 01:23:15.985678 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6b89b748d8-6dn5d" event={"ID":"d2fd6ef2-fcd0-4169-a26a-4f30b0619efa","Type":"ContainerStarted","Data":"2538e05f9cb07b42d0765fd82155987e8886583c2e01878b3238909b9c03dec5"} Nov 24 01:23:18 crc kubenswrapper[4755]: I1124 
01:23:18.004444 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-7hlnk" event={"ID":"3e04477a-f03b-4cd7-ba29-1622ea087da5","Type":"ContainerStarted","Data":"8a18d6a8166c98ab4b61d702887f6684b776c462bce7b312bfdb988d0f2a9283"} Nov 24 01:23:18 crc kubenswrapper[4755]: I1124 01:23:18.005223 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-7hlnk" Nov 24 01:23:18 crc kubenswrapper[4755]: I1124 01:23:18.005243 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-bjf4f" event={"ID":"b0fbafa4-291f-4eee-8133-30e7a85ff7ff","Type":"ContainerStarted","Data":"bed16417ad38e0d90c7a0f39de1216645ec801a304935713007708cf97ec1921"} Nov 24 01:23:18 crc kubenswrapper[4755]: I1124 01:23:18.005257 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6b89b748d8-6dn5d" event={"ID":"d2fd6ef2-fcd0-4169-a26a-4f30b0619efa","Type":"ContainerStarted","Data":"bc7b30c8b69261e11871b6e50c4baeba8f4cd89b0428b907a3fb56519be3fd26"} Nov 24 01:23:18 crc kubenswrapper[4755]: I1124 01:23:18.005269 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-5dcf9c57c5-db5zr" event={"ID":"e48ce9d8-52d8-4ab4-9f9b-b4ea43d68277","Type":"ContainerStarted","Data":"bf09bfa6b9fb3c3b9c931c4336a8385a6713b1e9830c11493b5d6cdc45afcd5f"} Nov 24 01:23:18 crc kubenswrapper[4755]: I1124 01:23:18.005284 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-6b89b748d8-6dn5d" Nov 24 01:23:18 crc kubenswrapper[4755]: I1124 01:23:18.011826 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-7hlnk" podStartSLOduration=1.605407179 podStartE2EDuration="5.01180915s" podCreationTimestamp="2025-11-24 01:23:13 +0000 UTC" firstStartedPulling="2025-11-24 01:23:14.317509056 +0000 UTC m=+619.003574557" lastFinishedPulling="2025-11-24 01:23:17.723911027 +0000 UTC m=+622.409976528" observedRunningTime="2025-11-24 01:23:18.01072275 +0000 UTC m=+622.696788321" watchObservedRunningTime="2025-11-24 01:23:18.01180915 +0000 UTC m=+622.697874651" Nov 24 01:23:18 crc kubenswrapper[4755]: I1124 01:23:18.027861 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-6b89b748d8-6dn5d" podStartSLOduration=2.50773766 podStartE2EDuration="5.027841989s" podCreationTimestamp="2025-11-24 01:23:13 +0000 UTC" firstStartedPulling="2025-11-24 01:23:15.239282702 +0000 UTC m=+619.925348203" lastFinishedPulling="2025-11-24 01:23:17.759387031 +0000 UTC m=+622.445452532" observedRunningTime="2025-11-24 01:23:18.023009994 +0000 UTC m=+622.709075505" watchObservedRunningTime="2025-11-24 01:23:18.027841989 +0000 UTC m=+622.713907490" Nov 24 01:23:18 crc kubenswrapper[4755]: I1124 01:23:18.042006 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-bjf4f" podStartSLOduration=1.468340766 podStartE2EDuration="4.041986765s" podCreationTimestamp="2025-11-24 01:23:14 +0000 UTC" firstStartedPulling="2025-11-24 01:23:15.145494795 +0000 UTC m=+619.831560296" lastFinishedPulling="2025-11-24 01:23:17.719140794 +0000 UTC m=+622.405206295" observedRunningTime="2025-11-24 01:23:18.040699559 +0000 UTC m=+622.726765060" watchObservedRunningTime="2025-11-24 01:23:18.041986765 +0000 UTC m=+622.728052276" Nov 24 01:23:21 crc 
kubenswrapper[4755]: I1124 01:23:21.025306 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-5dcf9c57c5-db5zr" event={"ID":"e48ce9d8-52d8-4ab4-9f9b-b4ea43d68277","Type":"ContainerStarted","Data":"71666898dacedc312c7f93c9388e99cdddce47a40c8b07b03c0b96972f3b4800"} Nov 24 01:23:21 crc kubenswrapper[4755]: I1124 01:23:21.052907 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-5dcf9c57c5-db5zr" podStartSLOduration=2.484710005 podStartE2EDuration="8.052880929s" podCreationTimestamp="2025-11-24 01:23:13 +0000 UTC" firstStartedPulling="2025-11-24 01:23:14.44654864 +0000 UTC m=+619.132614141" lastFinishedPulling="2025-11-24 01:23:20.014719564 +0000 UTC m=+624.700785065" observedRunningTime="2025-11-24 01:23:21.044946867 +0000 UTC m=+625.731012398" watchObservedRunningTime="2025-11-24 01:23:21.052880929 +0000 UTC m=+625.738946470" Nov 24 01:23:24 crc kubenswrapper[4755]: I1124 01:23:24.322997 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-7hlnk" Nov 24 01:23:24 crc kubenswrapper[4755]: I1124 01:23:24.600002 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-986587678-jlq69" Nov 24 01:23:24 crc kubenswrapper[4755]: I1124 01:23:24.600075 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-986587678-jlq69" Nov 24 01:23:24 crc kubenswrapper[4755]: I1124 01:23:24.606580 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-986587678-jlq69" Nov 24 01:23:25 crc kubenswrapper[4755]: I1124 01:23:25.058154 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-986587678-jlq69" Nov 24 01:23:25 crc kubenswrapper[4755]: I1124 01:23:25.165354 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-mlls8"] Nov 24 01:23:34 crc kubenswrapper[4755]: I1124 01:23:34.865469 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-6b89b748d8-6dn5d" Nov 24 01:23:46 crc kubenswrapper[4755]: I1124 01:23:46.496682 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6ttqpp"] Nov 24 01:23:46 crc kubenswrapper[4755]: I1124 01:23:46.498062 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6ttqpp" Nov 24 01:23:46 crc kubenswrapper[4755]: I1124 01:23:46.501929 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Nov 24 01:23:46 crc kubenswrapper[4755]: I1124 01:23:46.540787 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6ttqpp"] Nov 24 01:23:46 crc kubenswrapper[4755]: I1124 01:23:46.666289 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6cr6c\" (UniqueName: \"kubernetes.io/projected/1a72defe-0081-4267-ab64-1c844503e5cc-kube-api-access-6cr6c\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6ttqpp\" (UID: \"1a72defe-0081-4267-ab64-1c844503e5cc\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6ttqpp" Nov 24 01:23:46 crc kubenswrapper[4755]: I1124 01:23:46.666351 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1a72defe-0081-4267-ab64-1c844503e5cc-bundle\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6ttqpp\" (UID: \"1a72defe-0081-4267-ab64-1c844503e5cc\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6ttqpp" Nov 24 01:23:46 crc kubenswrapper[4755]: I1124 01:23:46.666381 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1a72defe-0081-4267-ab64-1c844503e5cc-util\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6ttqpp\" (UID: \"1a72defe-0081-4267-ab64-1c844503e5cc\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6ttqpp" Nov 24 01:23:46 crc kubenswrapper[4755]: I1124 01:23:46.767542 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6cr6c\" (UniqueName: \"kubernetes.io/projected/1a72defe-0081-4267-ab64-1c844503e5cc-kube-api-access-6cr6c\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6ttqpp\" (UID: \"1a72defe-0081-4267-ab64-1c844503e5cc\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6ttqpp" Nov 24 01:23:46 crc kubenswrapper[4755]: I1124 01:23:46.767686 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1a72defe-0081-4267-ab64-1c844503e5cc-bundle\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6ttqpp\" (UID: \"1a72defe-0081-4267-ab64-1c844503e5cc\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6ttqpp" Nov 24 01:23:46 crc kubenswrapper[4755]: I1124 01:23:46.767739 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1a72defe-0081-4267-ab64-1c844503e5cc-util\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6ttqpp\" (UID: \"1a72defe-0081-4267-ab64-1c844503e5cc\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6ttqpp" Nov 24 01:23:46 crc kubenswrapper[4755]: I1124 01:23:46.768461 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/1a72defe-0081-4267-ab64-1c844503e5cc-util\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6ttqpp\" (UID: \"1a72defe-0081-4267-ab64-1c844503e5cc\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6ttqpp" Nov 24 01:23:46 crc kubenswrapper[4755]: I1124 01:23:46.768654 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1a72defe-0081-4267-ab64-1c844503e5cc-bundle\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6ttqpp\" (UID: \"1a72defe-0081-4267-ab64-1c844503e5cc\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6ttqpp" Nov 24 01:23:46 crc kubenswrapper[4755]: I1124 01:23:46.799667 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6cr6c\" (UniqueName: \"kubernetes.io/projected/1a72defe-0081-4267-ab64-1c844503e5cc-kube-api-access-6cr6c\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6ttqpp\" (UID: \"1a72defe-0081-4267-ab64-1c844503e5cc\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6ttqpp" Nov 24 01:23:46 crc kubenswrapper[4755]: I1124 01:23:46.814320 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6ttqpp" Nov 24 01:23:47 crc kubenswrapper[4755]: I1124 01:23:47.266837 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6ttqpp"] Nov 24 01:23:48 crc kubenswrapper[4755]: I1124 01:23:48.208704 4755 generic.go:334] "Generic (PLEG): container finished" podID="1a72defe-0081-4267-ab64-1c844503e5cc" containerID="e592add9e5455d1fdd3425d1c17cf0bc052f773d430570b0e8a32a38a72acafe" exitCode=0 Nov 24 01:23:48 crc kubenswrapper[4755]: I1124 01:23:48.208915 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6ttqpp" event={"ID":"1a72defe-0081-4267-ab64-1c844503e5cc","Type":"ContainerDied","Data":"e592add9e5455d1fdd3425d1c17cf0bc052f773d430570b0e8a32a38a72acafe"} Nov 24 01:23:48 crc kubenswrapper[4755]: I1124 01:23:48.208968 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6ttqpp" event={"ID":"1a72defe-0081-4267-ab64-1c844503e5cc","Type":"ContainerStarted","Data":"38b7db69ec3954442c64b185096b0b19a7dd07ca1ea43a4518ee48d514b65412"} Nov 24 01:23:50 crc kubenswrapper[4755]: I1124 01:23:50.215274 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-mlls8" podUID="65c5f11b-931e-4dc2-8c3e-c7180b94ec08" containerName="console" containerID="cri-o://98395b84a8c25e568091cf520ac60a00f8a2a7c9c68bba5866b5c99e92cd6edb" gracePeriod=15 Nov 24 01:23:50 crc kubenswrapper[4755]: I1124 01:23:50.224452 4755 generic.go:334] "Generic (PLEG): container finished" podID="1a72defe-0081-4267-ab64-1c844503e5cc" containerID="ebb6be0323c92fc030997ce970481b2f7fafc530e6af4062b3b4a94c942ca8d9" exitCode=0 Nov 24 01:23:50 crc kubenswrapper[4755]: I1124 01:23:50.224487 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6ttqpp" 
event={"ID":"1a72defe-0081-4267-ab64-1c844503e5cc","Type":"ContainerDied","Data":"ebb6be0323c92fc030997ce970481b2f7fafc530e6af4062b3b4a94c942ca8d9"} Nov 24 01:23:50 crc kubenswrapper[4755]: I1124 01:23:50.579964 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-mlls8_65c5f11b-931e-4dc2-8c3e-c7180b94ec08/console/0.log" Nov 24 01:23:50 crc kubenswrapper[4755]: I1124 01:23:50.580352 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-mlls8" Nov 24 01:23:50 crc kubenswrapper[4755]: I1124 01:23:50.751505 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/65c5f11b-931e-4dc2-8c3e-c7180b94ec08-oauth-serving-cert\") pod \"65c5f11b-931e-4dc2-8c3e-c7180b94ec08\" (UID: \"65c5f11b-931e-4dc2-8c3e-c7180b94ec08\") " Nov 24 01:23:50 crc kubenswrapper[4755]: I1124 01:23:50.751679 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/65c5f11b-931e-4dc2-8c3e-c7180b94ec08-console-serving-cert\") pod \"65c5f11b-931e-4dc2-8c3e-c7180b94ec08\" (UID: \"65c5f11b-931e-4dc2-8c3e-c7180b94ec08\") " Nov 24 01:23:50 crc kubenswrapper[4755]: I1124 01:23:50.751771 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m22w7\" (UniqueName: \"kubernetes.io/projected/65c5f11b-931e-4dc2-8c3e-c7180b94ec08-kube-api-access-m22w7\") pod \"65c5f11b-931e-4dc2-8c3e-c7180b94ec08\" (UID: \"65c5f11b-931e-4dc2-8c3e-c7180b94ec08\") " Nov 24 01:23:50 crc kubenswrapper[4755]: I1124 01:23:50.751821 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/65c5f11b-931e-4dc2-8c3e-c7180b94ec08-trusted-ca-bundle\") pod \"65c5f11b-931e-4dc2-8c3e-c7180b94ec08\" (UID: \"65c5f11b-931e-4dc2-8c3e-c7180b94ec08\") " Nov 24 01:23:50 crc kubenswrapper[4755]: I1124 01:23:50.751863 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/65c5f11b-931e-4dc2-8c3e-c7180b94ec08-console-config\") pod \"65c5f11b-931e-4dc2-8c3e-c7180b94ec08\" (UID: \"65c5f11b-931e-4dc2-8c3e-c7180b94ec08\") " Nov 24 01:23:50 crc kubenswrapper[4755]: I1124 01:23:50.751910 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/65c5f11b-931e-4dc2-8c3e-c7180b94ec08-service-ca\") pod \"65c5f11b-931e-4dc2-8c3e-c7180b94ec08\" (UID: \"65c5f11b-931e-4dc2-8c3e-c7180b94ec08\") " Nov 24 01:23:50 crc kubenswrapper[4755]: I1124 01:23:50.751971 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/65c5f11b-931e-4dc2-8c3e-c7180b94ec08-console-oauth-config\") pod \"65c5f11b-931e-4dc2-8c3e-c7180b94ec08\" (UID: \"65c5f11b-931e-4dc2-8c3e-c7180b94ec08\") " Nov 24 01:23:50 crc kubenswrapper[4755]: I1124 01:23:50.752372 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65c5f11b-931e-4dc2-8c3e-c7180b94ec08-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "65c5f11b-931e-4dc2-8c3e-c7180b94ec08" (UID: "65c5f11b-931e-4dc2-8c3e-c7180b94ec08"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:23:50 crc kubenswrapper[4755]: I1124 01:23:50.752662 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65c5f11b-931e-4dc2-8c3e-c7180b94ec08-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "65c5f11b-931e-4dc2-8c3e-c7180b94ec08" (UID: "65c5f11b-931e-4dc2-8c3e-c7180b94ec08"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:23:50 crc kubenswrapper[4755]: I1124 01:23:50.752940 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65c5f11b-931e-4dc2-8c3e-c7180b94ec08-service-ca" (OuterVolumeSpecName: "service-ca") pod "65c5f11b-931e-4dc2-8c3e-c7180b94ec08" (UID: "65c5f11b-931e-4dc2-8c3e-c7180b94ec08"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:23:50 crc kubenswrapper[4755]: I1124 01:23:50.754055 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65c5f11b-931e-4dc2-8c3e-c7180b94ec08-console-config" (OuterVolumeSpecName: "console-config") pod "65c5f11b-931e-4dc2-8c3e-c7180b94ec08" (UID: "65c5f11b-931e-4dc2-8c3e-c7180b94ec08"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:23:50 crc kubenswrapper[4755]: I1124 01:23:50.760789 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65c5f11b-931e-4dc2-8c3e-c7180b94ec08-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "65c5f11b-931e-4dc2-8c3e-c7180b94ec08" (UID: "65c5f11b-931e-4dc2-8c3e-c7180b94ec08"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:23:50 crc kubenswrapper[4755]: I1124 01:23:50.761860 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65c5f11b-931e-4dc2-8c3e-c7180b94ec08-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "65c5f11b-931e-4dc2-8c3e-c7180b94ec08" (UID: "65c5f11b-931e-4dc2-8c3e-c7180b94ec08"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:23:50 crc kubenswrapper[4755]: I1124 01:23:50.763776 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65c5f11b-931e-4dc2-8c3e-c7180b94ec08-kube-api-access-m22w7" (OuterVolumeSpecName: "kube-api-access-m22w7") pod "65c5f11b-931e-4dc2-8c3e-c7180b94ec08" (UID: "65c5f11b-931e-4dc2-8c3e-c7180b94ec08"). InnerVolumeSpecName "kube-api-access-m22w7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:23:50 crc kubenswrapper[4755]: I1124 01:23:50.853718 4755 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/65c5f11b-931e-4dc2-8c3e-c7180b94ec08-console-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 01:23:50 crc kubenswrapper[4755]: I1124 01:23:50.853767 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m22w7\" (UniqueName: \"kubernetes.io/projected/65c5f11b-931e-4dc2-8c3e-c7180b94ec08-kube-api-access-m22w7\") on node \"crc\" DevicePath \"\"" Nov 24 01:23:50 crc kubenswrapper[4755]: I1124 01:23:50.853777 4755 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/65c5f11b-931e-4dc2-8c3e-c7180b94ec08-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:23:50 crc kubenswrapper[4755]: I1124 01:23:50.853788 4755 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/65c5f11b-931e-4dc2-8c3e-c7180b94ec08-console-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:23:50 crc kubenswrapper[4755]: I1124 01:23:50.853810 4755 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/65c5f11b-931e-4dc2-8c3e-c7180b94ec08-service-ca\") on node \"crc\" DevicePath \"\"" Nov 24 01:23:50 crc kubenswrapper[4755]: I1124 01:23:50.853822 4755 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/65c5f11b-931e-4dc2-8c3e-c7180b94ec08-console-oauth-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:23:50 crc kubenswrapper[4755]: I1124 01:23:50.853832 4755 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/65c5f11b-931e-4dc2-8c3e-c7180b94ec08-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 01:23:51 crc kubenswrapper[4755]: I1124 01:23:51.234708 4755 generic.go:334] "Generic (PLEG): container finished" podID="1a72defe-0081-4267-ab64-1c844503e5cc" containerID="cbe1aee11f51226039ebe48d115620c35f83d82a04de401caaa0893e2b5fb031" exitCode=0 Nov 24 01:23:51 crc kubenswrapper[4755]: I1124 01:23:51.234811 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6ttqpp" event={"ID":"1a72defe-0081-4267-ab64-1c844503e5cc","Type":"ContainerDied","Data":"cbe1aee11f51226039ebe48d115620c35f83d82a04de401caaa0893e2b5fb031"} Nov 24 01:23:51 crc kubenswrapper[4755]: I1124 01:23:51.238876 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-mlls8_65c5f11b-931e-4dc2-8c3e-c7180b94ec08/console/0.log" Nov 24 01:23:51 crc kubenswrapper[4755]: I1124 01:23:51.238969 4755 generic.go:334] "Generic (PLEG): container finished" podID="65c5f11b-931e-4dc2-8c3e-c7180b94ec08" containerID="98395b84a8c25e568091cf520ac60a00f8a2a7c9c68bba5866b5c99e92cd6edb" exitCode=2 Nov 24 01:23:51 crc kubenswrapper[4755]: I1124 01:23:51.239017 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-mlls8" event={"ID":"65c5f11b-931e-4dc2-8c3e-c7180b94ec08","Type":"ContainerDied","Data":"98395b84a8c25e568091cf520ac60a00f8a2a7c9c68bba5866b5c99e92cd6edb"} Nov 24 01:23:51 crc kubenswrapper[4755]: I1124 01:23:51.239049 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-mlls8" Nov 24 01:23:51 crc kubenswrapper[4755]: I1124 01:23:51.239069 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-mlls8" event={"ID":"65c5f11b-931e-4dc2-8c3e-c7180b94ec08","Type":"ContainerDied","Data":"9ddbc9c26d2db07f6aeac1a684db3dad2c1fa644de7e15e20e9ee00bb7c79f40"} Nov 24 01:23:51 crc kubenswrapper[4755]: I1124 01:23:51.239108 4755 scope.go:117] "RemoveContainer" containerID="98395b84a8c25e568091cf520ac60a00f8a2a7c9c68bba5866b5c99e92cd6edb" Nov 24 01:23:51 crc kubenswrapper[4755]: I1124 01:23:51.265858 4755 scope.go:117] "RemoveContainer" containerID="98395b84a8c25e568091cf520ac60a00f8a2a7c9c68bba5866b5c99e92cd6edb" Nov 24 01:23:51 crc kubenswrapper[4755]: E1124 01:23:51.266582 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"98395b84a8c25e568091cf520ac60a00f8a2a7c9c68bba5866b5c99e92cd6edb\": container with ID starting with 98395b84a8c25e568091cf520ac60a00f8a2a7c9c68bba5866b5c99e92cd6edb not found: ID does not exist" containerID="98395b84a8c25e568091cf520ac60a00f8a2a7c9c68bba5866b5c99e92cd6edb" Nov 24 01:23:51 crc kubenswrapper[4755]: I1124 01:23:51.266640 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"98395b84a8c25e568091cf520ac60a00f8a2a7c9c68bba5866b5c99e92cd6edb"} err="failed to get container status \"98395b84a8c25e568091cf520ac60a00f8a2a7c9c68bba5866b5c99e92cd6edb\": rpc error: code = NotFound desc = could not find container \"98395b84a8c25e568091cf520ac60a00f8a2a7c9c68bba5866b5c99e92cd6edb\": container with ID starting with 98395b84a8c25e568091cf520ac60a00f8a2a7c9c68bba5866b5c99e92cd6edb not found: ID does not exist" Nov 24 01:23:51 crc kubenswrapper[4755]: I1124 01:23:51.286264 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-mlls8"] Nov 24 01:23:51 crc kubenswrapper[4755]: I1124 01:23:51.290332 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-mlls8"] Nov 24 01:23:52 crc kubenswrapper[4755]: I1124 01:23:52.010483 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65c5f11b-931e-4dc2-8c3e-c7180b94ec08" path="/var/lib/kubelet/pods/65c5f11b-931e-4dc2-8c3e-c7180b94ec08/volumes" Nov 24 01:23:52 crc kubenswrapper[4755]: I1124 01:23:52.453361 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6ttqpp" Nov 24 01:23:52 crc kubenswrapper[4755]: I1124 01:23:52.478876 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1a72defe-0081-4267-ab64-1c844503e5cc-util\") pod \"1a72defe-0081-4267-ab64-1c844503e5cc\" (UID: \"1a72defe-0081-4267-ab64-1c844503e5cc\") " Nov 24 01:23:52 crc kubenswrapper[4755]: I1124 01:23:52.478958 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1a72defe-0081-4267-ab64-1c844503e5cc-bundle\") pod \"1a72defe-0081-4267-ab64-1c844503e5cc\" (UID: \"1a72defe-0081-4267-ab64-1c844503e5cc\") " Nov 24 01:23:52 crc kubenswrapper[4755]: I1124 01:23:52.479100 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6cr6c\" (UniqueName: \"kubernetes.io/projected/1a72defe-0081-4267-ab64-1c844503e5cc-kube-api-access-6cr6c\") pod \"1a72defe-0081-4267-ab64-1c844503e5cc\" (UID: \"1a72defe-0081-4267-ab64-1c844503e5cc\") " Nov 24 01:23:52 crc kubenswrapper[4755]: I1124 01:23:52.480233 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1a72defe-0081-4267-ab64-1c844503e5cc-bundle" (OuterVolumeSpecName: "bundle") pod "1a72defe-0081-4267-ab64-1c844503e5cc" (UID: "1a72defe-0081-4267-ab64-1c844503e5cc"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:23:52 crc kubenswrapper[4755]: I1124 01:23:52.485269 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a72defe-0081-4267-ab64-1c844503e5cc-kube-api-access-6cr6c" (OuterVolumeSpecName: "kube-api-access-6cr6c") pod "1a72defe-0081-4267-ab64-1c844503e5cc" (UID: "1a72defe-0081-4267-ab64-1c844503e5cc"). InnerVolumeSpecName "kube-api-access-6cr6c". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:23:52 crc kubenswrapper[4755]: I1124 01:23:52.497368 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1a72defe-0081-4267-ab64-1c844503e5cc-util" (OuterVolumeSpecName: "util") pod "1a72defe-0081-4267-ab64-1c844503e5cc" (UID: "1a72defe-0081-4267-ab64-1c844503e5cc"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:23:52 crc kubenswrapper[4755]: I1124 01:23:52.580902 4755 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1a72defe-0081-4267-ab64-1c844503e5cc-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:23:52 crc kubenswrapper[4755]: I1124 01:23:52.580943 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6cr6c\" (UniqueName: \"kubernetes.io/projected/1a72defe-0081-4267-ab64-1c844503e5cc-kube-api-access-6cr6c\") on node \"crc\" DevicePath \"\"" Nov 24 01:23:52 crc kubenswrapper[4755]: I1124 01:23:52.580958 4755 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1a72defe-0081-4267-ab64-1c844503e5cc-util\") on node \"crc\" DevicePath \"\"" Nov 24 01:23:53 crc kubenswrapper[4755]: I1124 01:23:53.264107 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6ttqpp" event={"ID":"1a72defe-0081-4267-ab64-1c844503e5cc","Type":"ContainerDied","Data":"38b7db69ec3954442c64b185096b0b19a7dd07ca1ea43a4518ee48d514b65412"} Nov 24 01:23:53 crc kubenswrapper[4755]: I1124 01:23:53.264163 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="38b7db69ec3954442c64b185096b0b19a7dd07ca1ea43a4518ee48d514b65412" Nov 24 01:23:53 crc kubenswrapper[4755]: I1124 01:23:53.264264 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6ttqpp" Nov 24 01:24:02 crc kubenswrapper[4755]: I1124 01:24:02.630715 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-55d87b5596-gzttc"] Nov 24 01:24:02 crc kubenswrapper[4755]: E1124 01:24:02.631375 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a72defe-0081-4267-ab64-1c844503e5cc" containerName="extract" Nov 24 01:24:02 crc kubenswrapper[4755]: I1124 01:24:02.631386 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a72defe-0081-4267-ab64-1c844503e5cc" containerName="extract" Nov 24 01:24:02 crc kubenswrapper[4755]: E1124 01:24:02.631393 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a72defe-0081-4267-ab64-1c844503e5cc" containerName="pull" Nov 24 01:24:02 crc kubenswrapper[4755]: I1124 01:24:02.631399 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a72defe-0081-4267-ab64-1c844503e5cc" containerName="pull" Nov 24 01:24:02 crc kubenswrapper[4755]: E1124 01:24:02.631406 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65c5f11b-931e-4dc2-8c3e-c7180b94ec08" containerName="console" Nov 24 01:24:02 crc kubenswrapper[4755]: I1124 01:24:02.631413 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="65c5f11b-931e-4dc2-8c3e-c7180b94ec08" containerName="console" Nov 24 01:24:02 crc kubenswrapper[4755]: E1124 01:24:02.631425 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a72defe-0081-4267-ab64-1c844503e5cc" containerName="util" Nov 24 01:24:02 crc kubenswrapper[4755]: I1124 01:24:02.631430 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a72defe-0081-4267-ab64-1c844503e5cc" containerName="util" Nov 24 01:24:02 crc kubenswrapper[4755]: I1124 01:24:02.631516 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a72defe-0081-4267-ab64-1c844503e5cc" containerName="extract" Nov 
24 01:24:02 crc kubenswrapper[4755]: I1124 01:24:02.631529 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="65c5f11b-931e-4dc2-8c3e-c7180b94ec08" containerName="console" Nov 24 01:24:02 crc kubenswrapper[4755]: I1124 01:24:02.631889 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-55d87b5596-gzttc" Nov 24 01:24:02 crc kubenswrapper[4755]: I1124 01:24:02.634335 4755 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Nov 24 01:24:02 crc kubenswrapper[4755]: I1124 01:24:02.634428 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Nov 24 01:24:02 crc kubenswrapper[4755]: I1124 01:24:02.634784 4755 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Nov 24 01:24:02 crc kubenswrapper[4755]: I1124 01:24:02.634928 4755 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-spgg9" Nov 24 01:24:02 crc kubenswrapper[4755]: I1124 01:24:02.634944 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Nov 24 01:24:02 crc kubenswrapper[4755]: I1124 01:24:02.645091 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-55d87b5596-gzttc"] Nov 24 01:24:02 crc kubenswrapper[4755]: I1124 01:24:02.702226 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/546c69cc-7307-405e-b5db-22ab6f25b47d-apiservice-cert\") pod \"metallb-operator-controller-manager-55d87b5596-gzttc\" (UID: \"546c69cc-7307-405e-b5db-22ab6f25b47d\") " pod="metallb-system/metallb-operator-controller-manager-55d87b5596-gzttc" Nov 24 01:24:02 crc kubenswrapper[4755]: I1124 01:24:02.702286 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/546c69cc-7307-405e-b5db-22ab6f25b47d-webhook-cert\") pod \"metallb-operator-controller-manager-55d87b5596-gzttc\" (UID: \"546c69cc-7307-405e-b5db-22ab6f25b47d\") " pod="metallb-system/metallb-operator-controller-manager-55d87b5596-gzttc" Nov 24 01:24:02 crc kubenswrapper[4755]: I1124 01:24:02.702310 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wjfk9\" (UniqueName: \"kubernetes.io/projected/546c69cc-7307-405e-b5db-22ab6f25b47d-kube-api-access-wjfk9\") pod \"metallb-operator-controller-manager-55d87b5596-gzttc\" (UID: \"546c69cc-7307-405e-b5db-22ab6f25b47d\") " pod="metallb-system/metallb-operator-controller-manager-55d87b5596-gzttc" Nov 24 01:24:02 crc kubenswrapper[4755]: I1124 01:24:02.803936 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/546c69cc-7307-405e-b5db-22ab6f25b47d-webhook-cert\") pod \"metallb-operator-controller-manager-55d87b5596-gzttc\" (UID: \"546c69cc-7307-405e-b5db-22ab6f25b47d\") " pod="metallb-system/metallb-operator-controller-manager-55d87b5596-gzttc" Nov 24 01:24:02 crc kubenswrapper[4755]: I1124 01:24:02.803998 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wjfk9\" (UniqueName: 
\"kubernetes.io/projected/546c69cc-7307-405e-b5db-22ab6f25b47d-kube-api-access-wjfk9\") pod \"metallb-operator-controller-manager-55d87b5596-gzttc\" (UID: \"546c69cc-7307-405e-b5db-22ab6f25b47d\") " pod="metallb-system/metallb-operator-controller-manager-55d87b5596-gzttc" Nov 24 01:24:02 crc kubenswrapper[4755]: I1124 01:24:02.804088 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/546c69cc-7307-405e-b5db-22ab6f25b47d-apiservice-cert\") pod \"metallb-operator-controller-manager-55d87b5596-gzttc\" (UID: \"546c69cc-7307-405e-b5db-22ab6f25b47d\") " pod="metallb-system/metallb-operator-controller-manager-55d87b5596-gzttc" Nov 24 01:24:02 crc kubenswrapper[4755]: I1124 01:24:02.811409 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/546c69cc-7307-405e-b5db-22ab6f25b47d-apiservice-cert\") pod \"metallb-operator-controller-manager-55d87b5596-gzttc\" (UID: \"546c69cc-7307-405e-b5db-22ab6f25b47d\") " pod="metallb-system/metallb-operator-controller-manager-55d87b5596-gzttc" Nov 24 01:24:02 crc kubenswrapper[4755]: I1124 01:24:02.811481 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/546c69cc-7307-405e-b5db-22ab6f25b47d-webhook-cert\") pod \"metallb-operator-controller-manager-55d87b5596-gzttc\" (UID: \"546c69cc-7307-405e-b5db-22ab6f25b47d\") " pod="metallb-system/metallb-operator-controller-manager-55d87b5596-gzttc" Nov 24 01:24:02 crc kubenswrapper[4755]: I1124 01:24:02.833380 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wjfk9\" (UniqueName: \"kubernetes.io/projected/546c69cc-7307-405e-b5db-22ab6f25b47d-kube-api-access-wjfk9\") pod \"metallb-operator-controller-manager-55d87b5596-gzttc\" (UID: \"546c69cc-7307-405e-b5db-22ab6f25b47d\") " pod="metallb-system/metallb-operator-controller-manager-55d87b5596-gzttc" Nov 24 01:24:02 crc kubenswrapper[4755]: I1124 01:24:02.948638 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-55d87b5596-gzttc" Nov 24 01:24:02 crc kubenswrapper[4755]: I1124 01:24:02.994530 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-7d8857ff56-s2ljm"] Nov 24 01:24:02 crc kubenswrapper[4755]: I1124 01:24:02.995731 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-7d8857ff56-s2ljm" Nov 24 01:24:02 crc kubenswrapper[4755]: I1124 01:24:02.999313 4755 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-lfd2p" Nov 24 01:24:02 crc kubenswrapper[4755]: I1124 01:24:02.999532 4755 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Nov 24 01:24:02 crc kubenswrapper[4755]: I1124 01:24:02.999731 4755 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Nov 24 01:24:03 crc kubenswrapper[4755]: I1124 01:24:03.008340 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wwq6h\" (UniqueName: \"kubernetes.io/projected/19c770cd-3557-4cc8-a06d-0597e9766be2-kube-api-access-wwq6h\") pod \"metallb-operator-webhook-server-7d8857ff56-s2ljm\" (UID: \"19c770cd-3557-4cc8-a06d-0597e9766be2\") " pod="metallb-system/metallb-operator-webhook-server-7d8857ff56-s2ljm" Nov 24 01:24:03 crc kubenswrapper[4755]: I1124 01:24:03.008387 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/19c770cd-3557-4cc8-a06d-0597e9766be2-webhook-cert\") pod \"metallb-operator-webhook-server-7d8857ff56-s2ljm\" (UID: \"19c770cd-3557-4cc8-a06d-0597e9766be2\") " pod="metallb-system/metallb-operator-webhook-server-7d8857ff56-s2ljm" Nov 24 01:24:03 crc kubenswrapper[4755]: I1124 01:24:03.008408 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/19c770cd-3557-4cc8-a06d-0597e9766be2-apiservice-cert\") pod \"metallb-operator-webhook-server-7d8857ff56-s2ljm\" (UID: \"19c770cd-3557-4cc8-a06d-0597e9766be2\") " pod="metallb-system/metallb-operator-webhook-server-7d8857ff56-s2ljm" Nov 24 01:24:03 crc kubenswrapper[4755]: I1124 01:24:03.017542 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-7d8857ff56-s2ljm"] Nov 24 01:24:03 crc kubenswrapper[4755]: I1124 01:24:03.109232 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wwq6h\" (UniqueName: \"kubernetes.io/projected/19c770cd-3557-4cc8-a06d-0597e9766be2-kube-api-access-wwq6h\") pod \"metallb-operator-webhook-server-7d8857ff56-s2ljm\" (UID: \"19c770cd-3557-4cc8-a06d-0597e9766be2\") " pod="metallb-system/metallb-operator-webhook-server-7d8857ff56-s2ljm" Nov 24 01:24:03 crc kubenswrapper[4755]: I1124 01:24:03.109283 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/19c770cd-3557-4cc8-a06d-0597e9766be2-webhook-cert\") pod \"metallb-operator-webhook-server-7d8857ff56-s2ljm\" (UID: \"19c770cd-3557-4cc8-a06d-0597e9766be2\") " pod="metallb-system/metallb-operator-webhook-server-7d8857ff56-s2ljm" Nov 24 01:24:03 crc kubenswrapper[4755]: I1124 01:24:03.109305 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/19c770cd-3557-4cc8-a06d-0597e9766be2-apiservice-cert\") pod \"metallb-operator-webhook-server-7d8857ff56-s2ljm\" (UID: \"19c770cd-3557-4cc8-a06d-0597e9766be2\") " pod="metallb-system/metallb-operator-webhook-server-7d8857ff56-s2ljm" Nov 24 01:24:03 crc kubenswrapper[4755]: I1124 
01:24:03.129631 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/19c770cd-3557-4cc8-a06d-0597e9766be2-apiservice-cert\") pod \"metallb-operator-webhook-server-7d8857ff56-s2ljm\" (UID: \"19c770cd-3557-4cc8-a06d-0597e9766be2\") " pod="metallb-system/metallb-operator-webhook-server-7d8857ff56-s2ljm" Nov 24 01:24:03 crc kubenswrapper[4755]: I1124 01:24:03.133579 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/19c770cd-3557-4cc8-a06d-0597e9766be2-webhook-cert\") pod \"metallb-operator-webhook-server-7d8857ff56-s2ljm\" (UID: \"19c770cd-3557-4cc8-a06d-0597e9766be2\") " pod="metallb-system/metallb-operator-webhook-server-7d8857ff56-s2ljm" Nov 24 01:24:03 crc kubenswrapper[4755]: I1124 01:24:03.134062 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wwq6h\" (UniqueName: \"kubernetes.io/projected/19c770cd-3557-4cc8-a06d-0597e9766be2-kube-api-access-wwq6h\") pod \"metallb-operator-webhook-server-7d8857ff56-s2ljm\" (UID: \"19c770cd-3557-4cc8-a06d-0597e9766be2\") " pod="metallb-system/metallb-operator-webhook-server-7d8857ff56-s2ljm" Nov 24 01:24:03 crc kubenswrapper[4755]: I1124 01:24:03.219436 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-55d87b5596-gzttc"] Nov 24 01:24:03 crc kubenswrapper[4755]: I1124 01:24:03.315008 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-55d87b5596-gzttc" event={"ID":"546c69cc-7307-405e-b5db-22ab6f25b47d","Type":"ContainerStarted","Data":"a8fea4ab77c57c41fd696e9a978a51625857956ff6e5b4f9ee541c52c5722847"} Nov 24 01:24:03 crc kubenswrapper[4755]: I1124 01:24:03.326628 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-7d8857ff56-s2ljm" Nov 24 01:24:03 crc kubenswrapper[4755]: I1124 01:24:03.526408 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-7d8857ff56-s2ljm"] Nov 24 01:24:03 crc kubenswrapper[4755]: W1124 01:24:03.533786 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod19c770cd_3557_4cc8_a06d_0597e9766be2.slice/crio-657d680410ed07c5ee8bfdf40e4c652e2084d197b42ca535411787426b5d2fdd WatchSource:0}: Error finding container 657d680410ed07c5ee8bfdf40e4c652e2084d197b42ca535411787426b5d2fdd: Status 404 returned error can't find the container with id 657d680410ed07c5ee8bfdf40e4c652e2084d197b42ca535411787426b5d2fdd Nov 24 01:24:04 crc kubenswrapper[4755]: I1124 01:24:04.322789 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-7d8857ff56-s2ljm" event={"ID":"19c770cd-3557-4cc8-a06d-0597e9766be2","Type":"ContainerStarted","Data":"657d680410ed07c5ee8bfdf40e4c652e2084d197b42ca535411787426b5d2fdd"} Nov 24 01:24:07 crc kubenswrapper[4755]: I1124 01:24:07.342027 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-55d87b5596-gzttc" event={"ID":"546c69cc-7307-405e-b5db-22ab6f25b47d","Type":"ContainerStarted","Data":"794b6757d418c8590b924480fcd41524a810835afe1048c679426bdeb98dc2f1"} Nov 24 01:24:07 crc kubenswrapper[4755]: I1124 01:24:07.342595 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-55d87b5596-gzttc" Nov 24 01:24:07 crc kubenswrapper[4755]: I1124 01:24:07.364371 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-55d87b5596-gzttc" podStartSLOduration=2.281000408 podStartE2EDuration="5.364353479s" podCreationTimestamp="2025-11-24 01:24:02 +0000 UTC" firstStartedPulling="2025-11-24 01:24:03.229062487 +0000 UTC m=+667.915127978" lastFinishedPulling="2025-11-24 01:24:06.312415548 +0000 UTC m=+670.998481049" observedRunningTime="2025-11-24 01:24:07.361636573 +0000 UTC m=+672.047702104" watchObservedRunningTime="2025-11-24 01:24:07.364353479 +0000 UTC m=+672.050418980" Nov 24 01:24:08 crc kubenswrapper[4755]: I1124 01:24:08.350531 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-7d8857ff56-s2ljm" event={"ID":"19c770cd-3557-4cc8-a06d-0597e9766be2","Type":"ContainerStarted","Data":"236871520e338577c91e565822b9bdde1b4b8b8cc2bbc1dbc0357c64af697884"} Nov 24 01:24:08 crc kubenswrapper[4755]: I1124 01:24:08.350905 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-7d8857ff56-s2ljm" Nov 24 01:24:08 crc kubenswrapper[4755]: I1124 01:24:08.374071 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-7d8857ff56-s2ljm" podStartSLOduration=1.85627023 podStartE2EDuration="6.374054026s" podCreationTimestamp="2025-11-24 01:24:02 +0000 UTC" firstStartedPulling="2025-11-24 01:24:03.537093172 +0000 UTC m=+668.223158673" lastFinishedPulling="2025-11-24 01:24:08.054876928 +0000 UTC m=+672.740942469" observedRunningTime="2025-11-24 01:24:08.370791604 +0000 UTC m=+673.056857125" watchObservedRunningTime="2025-11-24 01:24:08.374054026 +0000 UTC m=+673.060119527" 
Nov 24 01:24:23 crc kubenswrapper[4755]: I1124 01:24:23.333169 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-7d8857ff56-s2ljm" Nov 24 01:24:33 crc kubenswrapper[4755]: I1124 01:24:33.294543 4755 patch_prober.go:28] interesting pod/machine-config-daemon-h8xzm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 01:24:33 crc kubenswrapper[4755]: I1124 01:24:33.294830 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 01:24:42 crc kubenswrapper[4755]: I1124 01:24:42.950518 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-55d87b5596-gzttc" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.737984 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-8wz9f"] Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.740224 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-8wz9f" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.742936 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.742954 4755 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.743236 4755 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-2nhbx" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.751790 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-6998585d5-fd8hk"] Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.752597 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-6998585d5-fd8hk" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.758384 4755 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.766436 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/5218d7a8-6776-4f68-afa1-fc48e1d058f5-metrics\") pod \"frr-k8s-8wz9f\" (UID: \"5218d7a8-6776-4f68-afa1-fc48e1d058f5\") " pod="metallb-system/frr-k8s-8wz9f" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.766526 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/5218d7a8-6776-4f68-afa1-fc48e1d058f5-frr-conf\") pod \"frr-k8s-8wz9f\" (UID: \"5218d7a8-6776-4f68-afa1-fc48e1d058f5\") " pod="metallb-system/frr-k8s-8wz9f" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.766565 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5218d7a8-6776-4f68-afa1-fc48e1d058f5-metrics-certs\") pod \"frr-k8s-8wz9f\" (UID: \"5218d7a8-6776-4f68-afa1-fc48e1d058f5\") " pod="metallb-system/frr-k8s-8wz9f" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.766640 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/326eacf2-4f20-4577-b64c-5e5a55b8667a-cert\") pod \"frr-k8s-webhook-server-6998585d5-fd8hk\" (UID: \"326eacf2-4f20-4577-b64c-5e5a55b8667a\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-fd8hk" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.766663 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/5218d7a8-6776-4f68-afa1-fc48e1d058f5-reloader\") pod \"frr-k8s-8wz9f\" (UID: \"5218d7a8-6776-4f68-afa1-fc48e1d058f5\") " pod="metallb-system/frr-k8s-8wz9f" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.766692 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gzxqg\" (UniqueName: \"kubernetes.io/projected/5218d7a8-6776-4f68-afa1-fc48e1d058f5-kube-api-access-gzxqg\") pod \"frr-k8s-8wz9f\" (UID: \"5218d7a8-6776-4f68-afa1-fc48e1d058f5\") " pod="metallb-system/frr-k8s-8wz9f" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.766723 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qtxnv\" (UniqueName: \"kubernetes.io/projected/326eacf2-4f20-4577-b64c-5e5a55b8667a-kube-api-access-qtxnv\") pod \"frr-k8s-webhook-server-6998585d5-fd8hk\" (UID: \"326eacf2-4f20-4577-b64c-5e5a55b8667a\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-fd8hk" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.766785 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/5218d7a8-6776-4f68-afa1-fc48e1d058f5-frr-startup\") pod \"frr-k8s-8wz9f\" (UID: \"5218d7a8-6776-4f68-afa1-fc48e1d058f5\") " pod="metallb-system/frr-k8s-8wz9f" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.766792 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["metallb-system/frr-k8s-webhook-server-6998585d5-fd8hk"] Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.766816 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/5218d7a8-6776-4f68-afa1-fc48e1d058f5-frr-sockets\") pod \"frr-k8s-8wz9f\" (UID: \"5218d7a8-6776-4f68-afa1-fc48e1d058f5\") " pod="metallb-system/frr-k8s-8wz9f" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.841234 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-4qvvw"] Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.842413 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-4qvvw" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.845878 4755 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.845992 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.846709 4755 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-tlmzr" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.856571 4755 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.857969 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-6c7b4b5f48-lz2vt"] Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.858888 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-6c7b4b5f48-lz2vt" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.860270 4755 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.868325 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cf1d6671-b748-4db1-89b9-9ae4968f8297-cert\") pod \"controller-6c7b4b5f48-lz2vt\" (UID: \"cf1d6671-b748-4db1-89b9-9ae4968f8297\") " pod="metallb-system/controller-6c7b4b5f48-lz2vt" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.868370 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/06358ec3-9d2a-433d-8de9-5044c2e189a4-memberlist\") pod \"speaker-4qvvw\" (UID: \"06358ec3-9d2a-433d-8de9-5044c2e189a4\") " pod="metallb-system/speaker-4qvvw" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.868415 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/5218d7a8-6776-4f68-afa1-fc48e1d058f5-frr-conf\") pod \"frr-k8s-8wz9f\" (UID: \"5218d7a8-6776-4f68-afa1-fc48e1d058f5\") " pod="metallb-system/frr-k8s-8wz9f" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.868436 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5218d7a8-6776-4f68-afa1-fc48e1d058f5-metrics-certs\") pod \"frr-k8s-8wz9f\" (UID: \"5218d7a8-6776-4f68-afa1-fc48e1d058f5\") " pod="metallb-system/frr-k8s-8wz9f" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.868453 4755 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lsnbn\" (UniqueName: \"kubernetes.io/projected/06358ec3-9d2a-433d-8de9-5044c2e189a4-kube-api-access-lsnbn\") pod \"speaker-4qvvw\" (UID: \"06358ec3-9d2a-433d-8de9-5044c2e189a4\") " pod="metallb-system/speaker-4qvvw" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.868473 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cf1d6671-b748-4db1-89b9-9ae4968f8297-metrics-certs\") pod \"controller-6c7b4b5f48-lz2vt\" (UID: \"cf1d6671-b748-4db1-89b9-9ae4968f8297\") " pod="metallb-system/controller-6c7b4b5f48-lz2vt" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.868495 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/06358ec3-9d2a-433d-8de9-5044c2e189a4-metallb-excludel2\") pod \"speaker-4qvvw\" (UID: \"06358ec3-9d2a-433d-8de9-5044c2e189a4\") " pod="metallb-system/speaker-4qvvw" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.868515 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/326eacf2-4f20-4577-b64c-5e5a55b8667a-cert\") pod \"frr-k8s-webhook-server-6998585d5-fd8hk\" (UID: \"326eacf2-4f20-4577-b64c-5e5a55b8667a\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-fd8hk" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.868534 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v8p4q\" (UniqueName: \"kubernetes.io/projected/cf1d6671-b748-4db1-89b9-9ae4968f8297-kube-api-access-v8p4q\") pod \"controller-6c7b4b5f48-lz2vt\" (UID: \"cf1d6671-b748-4db1-89b9-9ae4968f8297\") " pod="metallb-system/controller-6c7b4b5f48-lz2vt" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.868550 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/5218d7a8-6776-4f68-afa1-fc48e1d058f5-reloader\") pod \"frr-k8s-8wz9f\" (UID: \"5218d7a8-6776-4f68-afa1-fc48e1d058f5\") " pod="metallb-system/frr-k8s-8wz9f" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.868566 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gzxqg\" (UniqueName: \"kubernetes.io/projected/5218d7a8-6776-4f68-afa1-fc48e1d058f5-kube-api-access-gzxqg\") pod \"frr-k8s-8wz9f\" (UID: \"5218d7a8-6776-4f68-afa1-fc48e1d058f5\") " pod="metallb-system/frr-k8s-8wz9f" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.868584 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qtxnv\" (UniqueName: \"kubernetes.io/projected/326eacf2-4f20-4577-b64c-5e5a55b8667a-kube-api-access-qtxnv\") pod \"frr-k8s-webhook-server-6998585d5-fd8hk\" (UID: \"326eacf2-4f20-4577-b64c-5e5a55b8667a\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-fd8hk" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.868644 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/5218d7a8-6776-4f68-afa1-fc48e1d058f5-frr-startup\") pod \"frr-k8s-8wz9f\" (UID: \"5218d7a8-6776-4f68-afa1-fc48e1d058f5\") " pod="metallb-system/frr-k8s-8wz9f" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.868668 4755 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/5218d7a8-6776-4f68-afa1-fc48e1d058f5-frr-sockets\") pod \"frr-k8s-8wz9f\" (UID: \"5218d7a8-6776-4f68-afa1-fc48e1d058f5\") " pod="metallb-system/frr-k8s-8wz9f" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.868687 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/06358ec3-9d2a-433d-8de9-5044c2e189a4-metrics-certs\") pod \"speaker-4qvvw\" (UID: \"06358ec3-9d2a-433d-8de9-5044c2e189a4\") " pod="metallb-system/speaker-4qvvw" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.868713 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/5218d7a8-6776-4f68-afa1-fc48e1d058f5-metrics\") pod \"frr-k8s-8wz9f\" (UID: \"5218d7a8-6776-4f68-afa1-fc48e1d058f5\") " pod="metallb-system/frr-k8s-8wz9f" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.868750 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/5218d7a8-6776-4f68-afa1-fc48e1d058f5-frr-conf\") pod \"frr-k8s-8wz9f\" (UID: \"5218d7a8-6776-4f68-afa1-fc48e1d058f5\") " pod="metallb-system/frr-k8s-8wz9f" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.869040 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/5218d7a8-6776-4f68-afa1-fc48e1d058f5-metrics\") pod \"frr-k8s-8wz9f\" (UID: \"5218d7a8-6776-4f68-afa1-fc48e1d058f5\") " pod="metallb-system/frr-k8s-8wz9f" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.869493 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6c7b4b5f48-lz2vt"] Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.869598 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/5218d7a8-6776-4f68-afa1-fc48e1d058f5-reloader\") pod \"frr-k8s-8wz9f\" (UID: \"5218d7a8-6776-4f68-afa1-fc48e1d058f5\") " pod="metallb-system/frr-k8s-8wz9f" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.869853 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/5218d7a8-6776-4f68-afa1-fc48e1d058f5-frr-sockets\") pod \"frr-k8s-8wz9f\" (UID: \"5218d7a8-6776-4f68-afa1-fc48e1d058f5\") " pod="metallb-system/frr-k8s-8wz9f" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.870685 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/5218d7a8-6776-4f68-afa1-fc48e1d058f5-frr-startup\") pod \"frr-k8s-8wz9f\" (UID: \"5218d7a8-6776-4f68-afa1-fc48e1d058f5\") " pod="metallb-system/frr-k8s-8wz9f" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.888052 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5218d7a8-6776-4f68-afa1-fc48e1d058f5-metrics-certs\") pod \"frr-k8s-8wz9f\" (UID: \"5218d7a8-6776-4f68-afa1-fc48e1d058f5\") " pod="metallb-system/frr-k8s-8wz9f" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.888183 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/326eacf2-4f20-4577-b64c-5e5a55b8667a-cert\") pod \"frr-k8s-webhook-server-6998585d5-fd8hk\" (UID: \"326eacf2-4f20-4577-b64c-5e5a55b8667a\") " 
pod="metallb-system/frr-k8s-webhook-server-6998585d5-fd8hk" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.896780 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qtxnv\" (UniqueName: \"kubernetes.io/projected/326eacf2-4f20-4577-b64c-5e5a55b8667a-kube-api-access-qtxnv\") pod \"frr-k8s-webhook-server-6998585d5-fd8hk\" (UID: \"326eacf2-4f20-4577-b64c-5e5a55b8667a\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-fd8hk" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.901148 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gzxqg\" (UniqueName: \"kubernetes.io/projected/5218d7a8-6776-4f68-afa1-fc48e1d058f5-kube-api-access-gzxqg\") pod \"frr-k8s-8wz9f\" (UID: \"5218d7a8-6776-4f68-afa1-fc48e1d058f5\") " pod="metallb-system/frr-k8s-8wz9f" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.969851 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cf1d6671-b748-4db1-89b9-9ae4968f8297-cert\") pod \"controller-6c7b4b5f48-lz2vt\" (UID: \"cf1d6671-b748-4db1-89b9-9ae4968f8297\") " pod="metallb-system/controller-6c7b4b5f48-lz2vt" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.969904 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/06358ec3-9d2a-433d-8de9-5044c2e189a4-memberlist\") pod \"speaker-4qvvw\" (UID: \"06358ec3-9d2a-433d-8de9-5044c2e189a4\") " pod="metallb-system/speaker-4qvvw" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.969952 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lsnbn\" (UniqueName: \"kubernetes.io/projected/06358ec3-9d2a-433d-8de9-5044c2e189a4-kube-api-access-lsnbn\") pod \"speaker-4qvvw\" (UID: \"06358ec3-9d2a-433d-8de9-5044c2e189a4\") " pod="metallb-system/speaker-4qvvw" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.969979 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cf1d6671-b748-4db1-89b9-9ae4968f8297-metrics-certs\") pod \"controller-6c7b4b5f48-lz2vt\" (UID: \"cf1d6671-b748-4db1-89b9-9ae4968f8297\") " pod="metallb-system/controller-6c7b4b5f48-lz2vt" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.970009 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/06358ec3-9d2a-433d-8de9-5044c2e189a4-metallb-excludel2\") pod \"speaker-4qvvw\" (UID: \"06358ec3-9d2a-433d-8de9-5044c2e189a4\") " pod="metallb-system/speaker-4qvvw" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.970034 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v8p4q\" (UniqueName: \"kubernetes.io/projected/cf1d6671-b748-4db1-89b9-9ae4968f8297-kube-api-access-v8p4q\") pod \"controller-6c7b4b5f48-lz2vt\" (UID: \"cf1d6671-b748-4db1-89b9-9ae4968f8297\") " pod="metallb-system/controller-6c7b4b5f48-lz2vt" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.970078 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/06358ec3-9d2a-433d-8de9-5044c2e189a4-metrics-certs\") pod \"speaker-4qvvw\" (UID: \"06358ec3-9d2a-433d-8de9-5044c2e189a4\") " pod="metallb-system/speaker-4qvvw" Nov 24 01:24:43 crc kubenswrapper[4755]: E1124 01:24:43.970101 4755 secret.go:188] 
Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Nov 24 01:24:43 crc kubenswrapper[4755]: E1124 01:24:43.970149 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/06358ec3-9d2a-433d-8de9-5044c2e189a4-memberlist podName:06358ec3-9d2a-433d-8de9-5044c2e189a4 nodeName:}" failed. No retries permitted until 2025-11-24 01:24:44.470134989 +0000 UTC m=+709.156200490 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/06358ec3-9d2a-433d-8de9-5044c2e189a4-memberlist") pod "speaker-4qvvw" (UID: "06358ec3-9d2a-433d-8de9-5044c2e189a4") : secret "metallb-memberlist" not found Nov 24 01:24:43 crc kubenswrapper[4755]: E1124 01:24:43.970172 4755 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found Nov 24 01:24:43 crc kubenswrapper[4755]: E1124 01:24:43.970209 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/06358ec3-9d2a-433d-8de9-5044c2e189a4-metrics-certs podName:06358ec3-9d2a-433d-8de9-5044c2e189a4 nodeName:}" failed. No retries permitted until 2025-11-24 01:24:44.470195161 +0000 UTC m=+709.156260662 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/06358ec3-9d2a-433d-8de9-5044c2e189a4-metrics-certs") pod "speaker-4qvvw" (UID: "06358ec3-9d2a-433d-8de9-5044c2e189a4") : secret "speaker-certs-secret" not found Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.970856 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/06358ec3-9d2a-433d-8de9-5044c2e189a4-metallb-excludel2\") pod \"speaker-4qvvw\" (UID: \"06358ec3-9d2a-433d-8de9-5044c2e189a4\") " pod="metallb-system/speaker-4qvvw" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.972943 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cf1d6671-b748-4db1-89b9-9ae4968f8297-cert\") pod \"controller-6c7b4b5f48-lz2vt\" (UID: \"cf1d6671-b748-4db1-89b9-9ae4968f8297\") " pod="metallb-system/controller-6c7b4b5f48-lz2vt" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.973066 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cf1d6671-b748-4db1-89b9-9ae4968f8297-metrics-certs\") pod \"controller-6c7b4b5f48-lz2vt\" (UID: \"cf1d6671-b748-4db1-89b9-9ae4968f8297\") " pod="metallb-system/controller-6c7b4b5f48-lz2vt" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.984004 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lsnbn\" (UniqueName: \"kubernetes.io/projected/06358ec3-9d2a-433d-8de9-5044c2e189a4-kube-api-access-lsnbn\") pod \"speaker-4qvvw\" (UID: \"06358ec3-9d2a-433d-8de9-5044c2e189a4\") " pod="metallb-system/speaker-4qvvw" Nov 24 01:24:43 crc kubenswrapper[4755]: I1124 01:24:43.984569 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v8p4q\" (UniqueName: \"kubernetes.io/projected/cf1d6671-b748-4db1-89b9-9ae4968f8297-kube-api-access-v8p4q\") pod \"controller-6c7b4b5f48-lz2vt\" (UID: \"cf1d6671-b748-4db1-89b9-9ae4968f8297\") " pod="metallb-system/controller-6c7b4b5f48-lz2vt" Nov 24 01:24:44 crc kubenswrapper[4755]: I1124 01:24:44.066686 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-8wz9f" Nov 24 01:24:44 crc kubenswrapper[4755]: I1124 01:24:44.085912 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-6998585d5-fd8hk" Nov 24 01:24:44 crc kubenswrapper[4755]: I1124 01:24:44.172717 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-6c7b4b5f48-lz2vt" Nov 24 01:24:44 crc kubenswrapper[4755]: I1124 01:24:44.476455 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/06358ec3-9d2a-433d-8de9-5044c2e189a4-metrics-certs\") pod \"speaker-4qvvw\" (UID: \"06358ec3-9d2a-433d-8de9-5044c2e189a4\") " pod="metallb-system/speaker-4qvvw" Nov 24 01:24:44 crc kubenswrapper[4755]: I1124 01:24:44.476818 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/06358ec3-9d2a-433d-8de9-5044c2e189a4-memberlist\") pod \"speaker-4qvvw\" (UID: \"06358ec3-9d2a-433d-8de9-5044c2e189a4\") " pod="metallb-system/speaker-4qvvw" Nov 24 01:24:44 crc kubenswrapper[4755]: E1124 01:24:44.476946 4755 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Nov 24 01:24:44 crc kubenswrapper[4755]: E1124 01:24:44.477025 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/06358ec3-9d2a-433d-8de9-5044c2e189a4-memberlist podName:06358ec3-9d2a-433d-8de9-5044c2e189a4 nodeName:}" failed. No retries permitted until 2025-11-24 01:24:45.477003619 +0000 UTC m=+710.163069120 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/06358ec3-9d2a-433d-8de9-5044c2e189a4-memberlist") pod "speaker-4qvvw" (UID: "06358ec3-9d2a-433d-8de9-5044c2e189a4") : secret "metallb-memberlist" not found Nov 24 01:24:44 crc kubenswrapper[4755]: I1124 01:24:44.482071 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/06358ec3-9d2a-433d-8de9-5044c2e189a4-metrics-certs\") pod \"speaker-4qvvw\" (UID: \"06358ec3-9d2a-433d-8de9-5044c2e189a4\") " pod="metallb-system/speaker-4qvvw" Nov 24 01:24:44 crc kubenswrapper[4755]: I1124 01:24:44.529485 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-6998585d5-fd8hk"] Nov 24 01:24:44 crc kubenswrapper[4755]: W1124 01:24:44.534292 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod326eacf2_4f20_4577_b64c_5e5a55b8667a.slice/crio-8bfcc4b7106a473922121837d5679f6469f533ed67aaff040265af0466fdcd5e WatchSource:0}: Error finding container 8bfcc4b7106a473922121837d5679f6469f533ed67aaff040265af0466fdcd5e: Status 404 returned error can't find the container with id 8bfcc4b7106a473922121837d5679f6469f533ed67aaff040265af0466fdcd5e Nov 24 01:24:44 crc kubenswrapper[4755]: I1124 01:24:44.573803 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-8wz9f" event={"ID":"5218d7a8-6776-4f68-afa1-fc48e1d058f5","Type":"ContainerStarted","Data":"671479fed8c078df3f075746ee511bc48af90d4a859f17833fd4fe829495d6a1"} Nov 24 01:24:44 crc kubenswrapper[4755]: I1124 01:24:44.575194 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-6998585d5-fd8hk" 
event={"ID":"326eacf2-4f20-4577-b64c-5e5a55b8667a","Type":"ContainerStarted","Data":"8bfcc4b7106a473922121837d5679f6469f533ed67aaff040265af0466fdcd5e"} Nov 24 01:24:44 crc kubenswrapper[4755]: I1124 01:24:44.597116 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6c7b4b5f48-lz2vt"] Nov 24 01:24:44 crc kubenswrapper[4755]: W1124 01:24:44.599894 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcf1d6671_b748_4db1_89b9_9ae4968f8297.slice/crio-e5c004abf248a61eb3898daf9c06e6b0874d9c98c2d5856ae06448f902fd5d79 WatchSource:0}: Error finding container e5c004abf248a61eb3898daf9c06e6b0874d9c98c2d5856ae06448f902fd5d79: Status 404 returned error can't find the container with id e5c004abf248a61eb3898daf9c06e6b0874d9c98c2d5856ae06448f902fd5d79 Nov 24 01:24:45 crc kubenswrapper[4755]: I1124 01:24:45.491036 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/06358ec3-9d2a-433d-8de9-5044c2e189a4-memberlist\") pod \"speaker-4qvvw\" (UID: \"06358ec3-9d2a-433d-8de9-5044c2e189a4\") " pod="metallb-system/speaker-4qvvw" Nov 24 01:24:45 crc kubenswrapper[4755]: I1124 01:24:45.495225 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/06358ec3-9d2a-433d-8de9-5044c2e189a4-memberlist\") pod \"speaker-4qvvw\" (UID: \"06358ec3-9d2a-433d-8de9-5044c2e189a4\") " pod="metallb-system/speaker-4qvvw" Nov 24 01:24:45 crc kubenswrapper[4755]: I1124 01:24:45.581844 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6c7b4b5f48-lz2vt" event={"ID":"cf1d6671-b748-4db1-89b9-9ae4968f8297","Type":"ContainerStarted","Data":"b842a55c98a80c51a1b7ce1b32759f4811883cc13c7a0e9cc6fcec7039f2691c"} Nov 24 01:24:45 crc kubenswrapper[4755]: I1124 01:24:45.581906 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6c7b4b5f48-lz2vt" event={"ID":"cf1d6671-b748-4db1-89b9-9ae4968f8297","Type":"ContainerStarted","Data":"7384d0eb6313697b1645397d416f60c02136482b8b663ed36de18af3da38e020"} Nov 24 01:24:45 crc kubenswrapper[4755]: I1124 01:24:45.581919 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6c7b4b5f48-lz2vt" event={"ID":"cf1d6671-b748-4db1-89b9-9ae4968f8297","Type":"ContainerStarted","Data":"e5c004abf248a61eb3898daf9c06e6b0874d9c98c2d5856ae06448f902fd5d79"} Nov 24 01:24:45 crc kubenswrapper[4755]: I1124 01:24:45.583873 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-6c7b4b5f48-lz2vt" Nov 24 01:24:45 crc kubenswrapper[4755]: I1124 01:24:45.621706 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-6c7b4b5f48-lz2vt" podStartSLOduration=2.621691069 podStartE2EDuration="2.621691069s" podCreationTimestamp="2025-11-24 01:24:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:24:45.621082152 +0000 UTC m=+710.307147653" watchObservedRunningTime="2025-11-24 01:24:45.621691069 +0000 UTC m=+710.307756570" Nov 24 01:24:45 crc kubenswrapper[4755]: I1124 01:24:45.657298 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-4qvvw" Nov 24 01:24:45 crc kubenswrapper[4755]: W1124 01:24:45.681310 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod06358ec3_9d2a_433d_8de9_5044c2e189a4.slice/crio-a34f3b5c44c5bf88afe016189af852fb1ac5cff276a6fbc4e12cc80648d10758 WatchSource:0}: Error finding container a34f3b5c44c5bf88afe016189af852fb1ac5cff276a6fbc4e12cc80648d10758: Status 404 returned error can't find the container with id a34f3b5c44c5bf88afe016189af852fb1ac5cff276a6fbc4e12cc80648d10758 Nov 24 01:24:46 crc kubenswrapper[4755]: I1124 01:24:46.594109 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-4qvvw" event={"ID":"06358ec3-9d2a-433d-8de9-5044c2e189a4","Type":"ContainerStarted","Data":"046eb30221b8fdac8a956395d96c2beb46681acdf351f58f64f4e31aee52964d"} Nov 24 01:24:46 crc kubenswrapper[4755]: I1124 01:24:46.594528 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-4qvvw" event={"ID":"06358ec3-9d2a-433d-8de9-5044c2e189a4","Type":"ContainerStarted","Data":"bbbe1800c572e7d6e9b841180a2d3b73b29fe3d7626d21f9ba149005531c7ef4"} Nov 24 01:24:46 crc kubenswrapper[4755]: I1124 01:24:46.594545 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-4qvvw" event={"ID":"06358ec3-9d2a-433d-8de9-5044c2e189a4","Type":"ContainerStarted","Data":"a34f3b5c44c5bf88afe016189af852fb1ac5cff276a6fbc4e12cc80648d10758"} Nov 24 01:24:46 crc kubenswrapper[4755]: I1124 01:24:46.613081 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-4qvvw" podStartSLOduration=3.613064342 podStartE2EDuration="3.613064342s" podCreationTimestamp="2025-11-24 01:24:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:24:46.610377747 +0000 UTC m=+711.296443258" watchObservedRunningTime="2025-11-24 01:24:46.613064342 +0000 UTC m=+711.299129843" Nov 24 01:24:51 crc kubenswrapper[4755]: I1124 01:24:51.627714 4755 generic.go:334] "Generic (PLEG): container finished" podID="5218d7a8-6776-4f68-afa1-fc48e1d058f5" containerID="99e237be14f210d87a236c30b13c3ff68b128a5b0b840d870b1ef1adcb2d98d8" exitCode=0 Nov 24 01:24:51 crc kubenswrapper[4755]: I1124 01:24:51.627780 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-8wz9f" event={"ID":"5218d7a8-6776-4f68-afa1-fc48e1d058f5","Type":"ContainerDied","Data":"99e237be14f210d87a236c30b13c3ff68b128a5b0b840d870b1ef1adcb2d98d8"} Nov 24 01:24:51 crc kubenswrapper[4755]: I1124 01:24:51.630921 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-6998585d5-fd8hk" event={"ID":"326eacf2-4f20-4577-b64c-5e5a55b8667a","Type":"ContainerStarted","Data":"e46bf32184a4b9444c5fa8de52db3479761e34f8291a27210e94a05130de3554"} Nov 24 01:24:51 crc kubenswrapper[4755]: I1124 01:24:51.631175 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-6998585d5-fd8hk" Nov 24 01:24:51 crc kubenswrapper[4755]: I1124 01:24:51.684878 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-6998585d5-fd8hk" podStartSLOduration=2.346137051 podStartE2EDuration="8.684848794s" podCreationTimestamp="2025-11-24 01:24:43 +0000 UTC" firstStartedPulling="2025-11-24 01:24:44.53623992 +0000 UTC m=+709.222305421" 
lastFinishedPulling="2025-11-24 01:24:50.874951663 +0000 UTC m=+715.561017164" observedRunningTime="2025-11-24 01:24:51.681138227 +0000 UTC m=+716.367203768" watchObservedRunningTime="2025-11-24 01:24:51.684848794 +0000 UTC m=+716.370914335" Nov 24 01:24:52 crc kubenswrapper[4755]: I1124 01:24:52.641442 4755 generic.go:334] "Generic (PLEG): container finished" podID="5218d7a8-6776-4f68-afa1-fc48e1d058f5" containerID="62fe93d3df8e0c931c0925a4f3c157fd764073a3780e29db0b068ccddd59ebaf" exitCode=0 Nov 24 01:24:52 crc kubenswrapper[4755]: I1124 01:24:52.641534 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-8wz9f" event={"ID":"5218d7a8-6776-4f68-afa1-fc48e1d058f5","Type":"ContainerDied","Data":"62fe93d3df8e0c931c0925a4f3c157fd764073a3780e29db0b068ccddd59ebaf"} Nov 24 01:24:53 crc kubenswrapper[4755]: I1124 01:24:53.651272 4755 generic.go:334] "Generic (PLEG): container finished" podID="5218d7a8-6776-4f68-afa1-fc48e1d058f5" containerID="3d1592db05b824613bb0d03cc1233553cba5ff6f3b97b04ab6319ac304d66301" exitCode=0 Nov 24 01:24:53 crc kubenswrapper[4755]: I1124 01:24:53.651355 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-8wz9f" event={"ID":"5218d7a8-6776-4f68-afa1-fc48e1d058f5","Type":"ContainerDied","Data":"3d1592db05b824613bb0d03cc1233553cba5ff6f3b97b04ab6319ac304d66301"} Nov 24 01:24:54 crc kubenswrapper[4755]: I1124 01:24:54.180252 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-6c7b4b5f48-lz2vt" Nov 24 01:24:54 crc kubenswrapper[4755]: I1124 01:24:54.662500 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-8wz9f" event={"ID":"5218d7a8-6776-4f68-afa1-fc48e1d058f5","Type":"ContainerStarted","Data":"894d407489ee7dfeaffc0fe4266318823fe9be86bf895c2ed51d0866a76c3210"} Nov 24 01:24:54 crc kubenswrapper[4755]: I1124 01:24:54.662837 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-8wz9f" event={"ID":"5218d7a8-6776-4f68-afa1-fc48e1d058f5","Type":"ContainerStarted","Data":"f55689d8408af255c4d22bf697122fae5bc80f3e7dd1bd572f14b476fa70293b"} Nov 24 01:24:54 crc kubenswrapper[4755]: I1124 01:24:54.662852 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-8wz9f" event={"ID":"5218d7a8-6776-4f68-afa1-fc48e1d058f5","Type":"ContainerStarted","Data":"a69f374bd6221cf05d33782487aa8a101e825fcf762eaf4d081cfbb109e93914"} Nov 24 01:24:54 crc kubenswrapper[4755]: I1124 01:24:54.662865 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-8wz9f" event={"ID":"5218d7a8-6776-4f68-afa1-fc48e1d058f5","Type":"ContainerStarted","Data":"f8cc5e2c0f2dd0b4c6931bbb88e55dfbc220426cd21c325c6887d1bbafbad694"} Nov 24 01:24:54 crc kubenswrapper[4755]: I1124 01:24:54.662883 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-8wz9f" Nov 24 01:24:54 crc kubenswrapper[4755]: I1124 01:24:54.662894 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-8wz9f" event={"ID":"5218d7a8-6776-4f68-afa1-fc48e1d058f5","Type":"ContainerStarted","Data":"59c20202a2ba984bd3dc74aa974c967effa508372c05935e2845c5ea944a6a8f"} Nov 24 01:24:54 crc kubenswrapper[4755]: I1124 01:24:54.662905 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-8wz9f" event={"ID":"5218d7a8-6776-4f68-afa1-fc48e1d058f5","Type":"ContainerStarted","Data":"2758e78a3565e5e6cff6600e29c13a25618d2eb0f7786e38e357ab92ebd0b36f"} Nov 24 
01:24:54 crc kubenswrapper[4755]: I1124 01:24:54.684657 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-8wz9f" podStartSLOduration=4.966151144 podStartE2EDuration="11.684640064s" podCreationTimestamp="2025-11-24 01:24:43 +0000 UTC" firstStartedPulling="2025-11-24 01:24:44.164044575 +0000 UTC m=+708.850110076" lastFinishedPulling="2025-11-24 01:24:50.882533495 +0000 UTC m=+715.568598996" observedRunningTime="2025-11-24 01:24:54.681568843 +0000 UTC m=+719.367634344" watchObservedRunningTime="2025-11-24 01:24:54.684640064 +0000 UTC m=+719.370705565" Nov 24 01:24:55 crc kubenswrapper[4755]: I1124 01:24:55.658766 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-4qvvw" Nov 24 01:24:55 crc kubenswrapper[4755]: I1124 01:24:55.662680 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-4qvvw" Nov 24 01:24:58 crc kubenswrapper[4755]: I1124 01:24:58.540164 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-2z2q4"] Nov 24 01:24:58 crc kubenswrapper[4755]: I1124 01:24:58.541561 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-2z2q4" Nov 24 01:24:58 crc kubenswrapper[4755]: I1124 01:24:58.545661 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Nov 24 01:24:58 crc kubenswrapper[4755]: I1124 01:24:58.545953 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-lg7n6" Nov 24 01:24:58 crc kubenswrapper[4755]: I1124 01:24:58.546123 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Nov 24 01:24:58 crc kubenswrapper[4755]: I1124 01:24:58.550094 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-2z2q4"] Nov 24 01:24:58 crc kubenswrapper[4755]: I1124 01:24:58.676842 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q2c46\" (UniqueName: \"kubernetes.io/projected/1d5cc1ef-48fa-42de-9301-1caa5e09d04c-kube-api-access-q2c46\") pod \"openstack-operator-index-2z2q4\" (UID: \"1d5cc1ef-48fa-42de-9301-1caa5e09d04c\") " pod="openstack-operators/openstack-operator-index-2z2q4" Nov 24 01:24:58 crc kubenswrapper[4755]: I1124 01:24:58.777918 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q2c46\" (UniqueName: \"kubernetes.io/projected/1d5cc1ef-48fa-42de-9301-1caa5e09d04c-kube-api-access-q2c46\") pod \"openstack-operator-index-2z2q4\" (UID: \"1d5cc1ef-48fa-42de-9301-1caa5e09d04c\") " pod="openstack-operators/openstack-operator-index-2z2q4" Nov 24 01:24:58 crc kubenswrapper[4755]: I1124 01:24:58.796519 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q2c46\" (UniqueName: \"kubernetes.io/projected/1d5cc1ef-48fa-42de-9301-1caa5e09d04c-kube-api-access-q2c46\") pod \"openstack-operator-index-2z2q4\" (UID: \"1d5cc1ef-48fa-42de-9301-1caa5e09d04c\") " pod="openstack-operators/openstack-operator-index-2z2q4" Nov 24 01:24:58 crc kubenswrapper[4755]: I1124 01:24:58.856859 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-2z2q4" Nov 24 01:24:59 crc kubenswrapper[4755]: I1124 01:24:59.067356 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-8wz9f" Nov 24 01:24:59 crc kubenswrapper[4755]: I1124 01:24:59.106119 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-8wz9f" Nov 24 01:24:59 crc kubenswrapper[4755]: I1124 01:24:59.298897 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-2z2q4"] Nov 24 01:24:59 crc kubenswrapper[4755]: I1124 01:24:59.704374 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-2z2q4" event={"ID":"1d5cc1ef-48fa-42de-9301-1caa5e09d04c","Type":"ContainerStarted","Data":"7e632e0e114ee3de082f4028a630b7a773c706ce9b8eb53375f758024c3dfa6f"} Nov 24 01:25:01 crc kubenswrapper[4755]: I1124 01:25:01.719551 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-2z2q4" event={"ID":"1d5cc1ef-48fa-42de-9301-1caa5e09d04c","Type":"ContainerStarted","Data":"b1dbd96d8861bf9166f95f1ad96b856fee4c688c8a0ba27fc140aaff0fa9090a"} Nov 24 01:25:01 crc kubenswrapper[4755]: I1124 01:25:01.742824 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-2z2q4" podStartSLOduration=1.76938613 podStartE2EDuration="3.742800344s" podCreationTimestamp="2025-11-24 01:24:58 +0000 UTC" firstStartedPulling="2025-11-24 01:24:59.309807126 +0000 UTC m=+723.995872647" lastFinishedPulling="2025-11-24 01:25:01.28322136 +0000 UTC m=+725.969286861" observedRunningTime="2025-11-24 01:25:01.74114145 +0000 UTC m=+726.427207001" watchObservedRunningTime="2025-11-24 01:25:01.742800344 +0000 UTC m=+726.428865855" Nov 24 01:25:01 crc kubenswrapper[4755]: I1124 01:25:01.926381 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-2z2q4"] Nov 24 01:25:02 crc kubenswrapper[4755]: I1124 01:25:02.540726 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-7f5r2"] Nov 24 01:25:02 crc kubenswrapper[4755]: I1124 01:25:02.542133 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-7f5r2" Nov 24 01:25:02 crc kubenswrapper[4755]: I1124 01:25:02.549103 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-7f5r2"] Nov 24 01:25:02 crc kubenswrapper[4755]: I1124 01:25:02.644680 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9jr5h\" (UniqueName: \"kubernetes.io/projected/ccfae768-f324-4db5-ac90-8fd333deca44-kube-api-access-9jr5h\") pod \"openstack-operator-index-7f5r2\" (UID: \"ccfae768-f324-4db5-ac90-8fd333deca44\") " pod="openstack-operators/openstack-operator-index-7f5r2" Nov 24 01:25:02 crc kubenswrapper[4755]: I1124 01:25:02.746048 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9jr5h\" (UniqueName: \"kubernetes.io/projected/ccfae768-f324-4db5-ac90-8fd333deca44-kube-api-access-9jr5h\") pod \"openstack-operator-index-7f5r2\" (UID: \"ccfae768-f324-4db5-ac90-8fd333deca44\") " pod="openstack-operators/openstack-operator-index-7f5r2" Nov 24 01:25:02 crc kubenswrapper[4755]: I1124 01:25:02.766893 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9jr5h\" (UniqueName: \"kubernetes.io/projected/ccfae768-f324-4db5-ac90-8fd333deca44-kube-api-access-9jr5h\") pod \"openstack-operator-index-7f5r2\" (UID: \"ccfae768-f324-4db5-ac90-8fd333deca44\") " pod="openstack-operators/openstack-operator-index-7f5r2" Nov 24 01:25:02 crc kubenswrapper[4755]: I1124 01:25:02.872553 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-7f5r2" Nov 24 01:25:03 crc kubenswrapper[4755]: I1124 01:25:03.295814 4755 patch_prober.go:28] interesting pod/machine-config-daemon-h8xzm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 01:25:03 crc kubenswrapper[4755]: I1124 01:25:03.296165 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 01:25:03 crc kubenswrapper[4755]: I1124 01:25:03.337423 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-7f5r2"] Nov 24 01:25:03 crc kubenswrapper[4755]: I1124 01:25:03.733863 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-2z2q4" podUID="1d5cc1ef-48fa-42de-9301-1caa5e09d04c" containerName="registry-server" containerID="cri-o://b1dbd96d8861bf9166f95f1ad96b856fee4c688c8a0ba27fc140aaff0fa9090a" gracePeriod=2 Nov 24 01:25:03 crc kubenswrapper[4755]: I1124 01:25:03.734317 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-7f5r2" event={"ID":"ccfae768-f324-4db5-ac90-8fd333deca44","Type":"ContainerStarted","Data":"54d6c170a5f03ac8277f862351ad428306f14aa3d3b4e81e5d90c094f74bfeb0"} Nov 24 01:25:03 crc kubenswrapper[4755]: I1124 01:25:03.734346 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-7f5r2" 
event={"ID":"ccfae768-f324-4db5-ac90-8fd333deca44","Type":"ContainerStarted","Data":"4547ff744e941b0226803728c5912a2ffa19435b3c2890a31fa5d17c0f61c45a"} Nov 24 01:25:03 crc kubenswrapper[4755]: I1124 01:25:03.763707 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-7f5r2" podStartSLOduration=1.7055731189999999 podStartE2EDuration="1.763684076s" podCreationTimestamp="2025-11-24 01:25:02 +0000 UTC" firstStartedPulling="2025-11-24 01:25:03.358945134 +0000 UTC m=+728.045010675" lastFinishedPulling="2025-11-24 01:25:03.417056121 +0000 UTC m=+728.103121632" observedRunningTime="2025-11-24 01:25:03.759852086 +0000 UTC m=+728.445917677" watchObservedRunningTime="2025-11-24 01:25:03.763684076 +0000 UTC m=+728.449749577" Nov 24 01:25:04 crc kubenswrapper[4755]: I1124 01:25:04.069954 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-8wz9f" Nov 24 01:25:04 crc kubenswrapper[4755]: I1124 01:25:04.095820 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-6998585d5-fd8hk" Nov 24 01:25:04 crc kubenswrapper[4755]: I1124 01:25:04.166156 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-2z2q4" Nov 24 01:25:04 crc kubenswrapper[4755]: I1124 01:25:04.268592 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q2c46\" (UniqueName: \"kubernetes.io/projected/1d5cc1ef-48fa-42de-9301-1caa5e09d04c-kube-api-access-q2c46\") pod \"1d5cc1ef-48fa-42de-9301-1caa5e09d04c\" (UID: \"1d5cc1ef-48fa-42de-9301-1caa5e09d04c\") " Nov 24 01:25:04 crc kubenswrapper[4755]: I1124 01:25:04.273263 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d5cc1ef-48fa-42de-9301-1caa5e09d04c-kube-api-access-q2c46" (OuterVolumeSpecName: "kube-api-access-q2c46") pod "1d5cc1ef-48fa-42de-9301-1caa5e09d04c" (UID: "1d5cc1ef-48fa-42de-9301-1caa5e09d04c"). InnerVolumeSpecName "kube-api-access-q2c46". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:25:04 crc kubenswrapper[4755]: I1124 01:25:04.369497 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q2c46\" (UniqueName: \"kubernetes.io/projected/1d5cc1ef-48fa-42de-9301-1caa5e09d04c-kube-api-access-q2c46\") on node \"crc\" DevicePath \"\"" Nov 24 01:25:04 crc kubenswrapper[4755]: I1124 01:25:04.742806 4755 generic.go:334] "Generic (PLEG): container finished" podID="1d5cc1ef-48fa-42de-9301-1caa5e09d04c" containerID="b1dbd96d8861bf9166f95f1ad96b856fee4c688c8a0ba27fc140aaff0fa9090a" exitCode=0 Nov 24 01:25:04 crc kubenswrapper[4755]: I1124 01:25:04.742947 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-2z2q4" event={"ID":"1d5cc1ef-48fa-42de-9301-1caa5e09d04c","Type":"ContainerDied","Data":"b1dbd96d8861bf9166f95f1ad96b856fee4c688c8a0ba27fc140aaff0fa9090a"} Nov 24 01:25:04 crc kubenswrapper[4755]: I1124 01:25:04.743026 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-2z2q4" event={"ID":"1d5cc1ef-48fa-42de-9301-1caa5e09d04c","Type":"ContainerDied","Data":"7e632e0e114ee3de082f4028a630b7a773c706ce9b8eb53375f758024c3dfa6f"} Nov 24 01:25:04 crc kubenswrapper[4755]: I1124 01:25:04.743060 4755 scope.go:117] "RemoveContainer" containerID="b1dbd96d8861bf9166f95f1ad96b856fee4c688c8a0ba27fc140aaff0fa9090a" Nov 24 01:25:04 crc kubenswrapper[4755]: I1124 01:25:04.743319 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-2z2q4" Nov 24 01:25:04 crc kubenswrapper[4755]: I1124 01:25:04.765407 4755 scope.go:117] "RemoveContainer" containerID="b1dbd96d8861bf9166f95f1ad96b856fee4c688c8a0ba27fc140aaff0fa9090a" Nov 24 01:25:04 crc kubenswrapper[4755]: E1124 01:25:04.766077 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b1dbd96d8861bf9166f95f1ad96b856fee4c688c8a0ba27fc140aaff0fa9090a\": container with ID starting with b1dbd96d8861bf9166f95f1ad96b856fee4c688c8a0ba27fc140aaff0fa9090a not found: ID does not exist" containerID="b1dbd96d8861bf9166f95f1ad96b856fee4c688c8a0ba27fc140aaff0fa9090a" Nov 24 01:25:04 crc kubenswrapper[4755]: I1124 01:25:04.766122 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1dbd96d8861bf9166f95f1ad96b856fee4c688c8a0ba27fc140aaff0fa9090a"} err="failed to get container status \"b1dbd96d8861bf9166f95f1ad96b856fee4c688c8a0ba27fc140aaff0fa9090a\": rpc error: code = NotFound desc = could not find container \"b1dbd96d8861bf9166f95f1ad96b856fee4c688c8a0ba27fc140aaff0fa9090a\": container with ID starting with b1dbd96d8861bf9166f95f1ad96b856fee4c688c8a0ba27fc140aaff0fa9090a not found: ID does not exist" Nov 24 01:25:04 crc kubenswrapper[4755]: I1124 01:25:04.799243 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-2z2q4"] Nov 24 01:25:04 crc kubenswrapper[4755]: I1124 01:25:04.806250 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-2z2q4"] Nov 24 01:25:06 crc kubenswrapper[4755]: I1124 01:25:06.013850 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d5cc1ef-48fa-42de-9301-1caa5e09d04c" path="/var/lib/kubelet/pods/1d5cc1ef-48fa-42de-9301-1caa5e09d04c/volumes" Nov 24 01:25:12 crc kubenswrapper[4755]: I1124 01:25:12.432165 4755 kubelet.go:2437] 
"SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-2qz86"] Nov 24 01:25:12 crc kubenswrapper[4755]: I1124 01:25:12.432852 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-2qz86" podUID="1f1350c1-7f2f-4c43-9029-a33ab1eb24a8" containerName="controller-manager" containerID="cri-o://7d00c25c1095cb12f7a49b857e83d14eb540b0633b4878a1705c9a295f39dd99" gracePeriod=30 Nov 24 01:25:12 crc kubenswrapper[4755]: I1124 01:25:12.534511 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-4gmv8"] Nov 24 01:25:12 crc kubenswrapper[4755]: I1124 01:25:12.534832 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4gmv8" podUID="85fb9244-9754-4924-b53c-51ccbf6a5220" containerName="route-controller-manager" containerID="cri-o://1edaa6e0504d8818e4a5ab2990280a35c8a4a20408a381ceb10c5909fd536f25" gracePeriod=30 Nov 24 01:25:12 crc kubenswrapper[4755]: I1124 01:25:12.819138 4755 generic.go:334] "Generic (PLEG): container finished" podID="1f1350c1-7f2f-4c43-9029-a33ab1eb24a8" containerID="7d00c25c1095cb12f7a49b857e83d14eb540b0633b4878a1705c9a295f39dd99" exitCode=0 Nov 24 01:25:12 crc kubenswrapper[4755]: I1124 01:25:12.819219 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-2qz86" event={"ID":"1f1350c1-7f2f-4c43-9029-a33ab1eb24a8","Type":"ContainerDied","Data":"7d00c25c1095cb12f7a49b857e83d14eb540b0633b4878a1705c9a295f39dd99"} Nov 24 01:25:12 crc kubenswrapper[4755]: I1124 01:25:12.819620 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-2qz86" event={"ID":"1f1350c1-7f2f-4c43-9029-a33ab1eb24a8","Type":"ContainerDied","Data":"5bea381c248e511e39c6c0529bbbc38a26970d8a045ed4106626230c7e561b0d"} Nov 24 01:25:12 crc kubenswrapper[4755]: I1124 01:25:12.819638 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5bea381c248e511e39c6c0529bbbc38a26970d8a045ed4106626230c7e561b0d" Nov 24 01:25:12 crc kubenswrapper[4755]: I1124 01:25:12.822462 4755 generic.go:334] "Generic (PLEG): container finished" podID="85fb9244-9754-4924-b53c-51ccbf6a5220" containerID="1edaa6e0504d8818e4a5ab2990280a35c8a4a20408a381ceb10c5909fd536f25" exitCode=0 Nov 24 01:25:12 crc kubenswrapper[4755]: I1124 01:25:12.822513 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4gmv8" event={"ID":"85fb9244-9754-4924-b53c-51ccbf6a5220","Type":"ContainerDied","Data":"1edaa6e0504d8818e4a5ab2990280a35c8a4a20408a381ceb10c5909fd536f25"} Nov 24 01:25:12 crc kubenswrapper[4755]: I1124 01:25:12.850752 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-2qz86" Nov 24 01:25:12 crc kubenswrapper[4755]: I1124 01:25:12.873672 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-7f5r2" Nov 24 01:25:12 crc kubenswrapper[4755]: I1124 01:25:12.873901 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-7f5r2" Nov 24 01:25:12 crc kubenswrapper[4755]: I1124 01:25:12.897148 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qbsp6\" (UniqueName: \"kubernetes.io/projected/1f1350c1-7f2f-4c43-9029-a33ab1eb24a8-kube-api-access-qbsp6\") pod \"1f1350c1-7f2f-4c43-9029-a33ab1eb24a8\" (UID: \"1f1350c1-7f2f-4c43-9029-a33ab1eb24a8\") " Nov 24 01:25:12 crc kubenswrapper[4755]: I1124 01:25:12.897199 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1f1350c1-7f2f-4c43-9029-a33ab1eb24a8-proxy-ca-bundles\") pod \"1f1350c1-7f2f-4c43-9029-a33ab1eb24a8\" (UID: \"1f1350c1-7f2f-4c43-9029-a33ab1eb24a8\") " Nov 24 01:25:12 crc kubenswrapper[4755]: I1124 01:25:12.897952 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f1350c1-7f2f-4c43-9029-a33ab1eb24a8-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "1f1350c1-7f2f-4c43-9029-a33ab1eb24a8" (UID: "1f1350c1-7f2f-4c43-9029-a33ab1eb24a8"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:25:12 crc kubenswrapper[4755]: I1124 01:25:12.898030 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f1350c1-7f2f-4c43-9029-a33ab1eb24a8-config\") pod \"1f1350c1-7f2f-4c43-9029-a33ab1eb24a8\" (UID: \"1f1350c1-7f2f-4c43-9029-a33ab1eb24a8\") " Nov 24 01:25:12 crc kubenswrapper[4755]: I1124 01:25:12.898059 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1f1350c1-7f2f-4c43-9029-a33ab1eb24a8-client-ca\") pod \"1f1350c1-7f2f-4c43-9029-a33ab1eb24a8\" (UID: \"1f1350c1-7f2f-4c43-9029-a33ab1eb24a8\") " Nov 24 01:25:12 crc kubenswrapper[4755]: I1124 01:25:12.898545 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f1350c1-7f2f-4c43-9029-a33ab1eb24a8-client-ca" (OuterVolumeSpecName: "client-ca") pod "1f1350c1-7f2f-4c43-9029-a33ab1eb24a8" (UID: "1f1350c1-7f2f-4c43-9029-a33ab1eb24a8"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:25:12 crc kubenswrapper[4755]: I1124 01:25:12.898688 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f1350c1-7f2f-4c43-9029-a33ab1eb24a8-config" (OuterVolumeSpecName: "config") pod "1f1350c1-7f2f-4c43-9029-a33ab1eb24a8" (UID: "1f1350c1-7f2f-4c43-9029-a33ab1eb24a8"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:25:12 crc kubenswrapper[4755]: I1124 01:25:12.898823 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1f1350c1-7f2f-4c43-9029-a33ab1eb24a8-serving-cert\") pod \"1f1350c1-7f2f-4c43-9029-a33ab1eb24a8\" (UID: \"1f1350c1-7f2f-4c43-9029-a33ab1eb24a8\") " Nov 24 01:25:12 crc kubenswrapper[4755]: I1124 01:25:12.899458 4755 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1f1350c1-7f2f-4c43-9029-a33ab1eb24a8-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Nov 24 01:25:12 crc kubenswrapper[4755]: I1124 01:25:12.899478 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f1350c1-7f2f-4c43-9029-a33ab1eb24a8-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:25:12 crc kubenswrapper[4755]: I1124 01:25:12.899489 4755 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1f1350c1-7f2f-4c43-9029-a33ab1eb24a8-client-ca\") on node \"crc\" DevicePath \"\"" Nov 24 01:25:12 crc kubenswrapper[4755]: I1124 01:25:12.903439 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-7f5r2" Nov 24 01:25:12 crc kubenswrapper[4755]: I1124 01:25:12.906218 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f1350c1-7f2f-4c43-9029-a33ab1eb24a8-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1f1350c1-7f2f-4c43-9029-a33ab1eb24a8" (UID: "1f1350c1-7f2f-4c43-9029-a33ab1eb24a8"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:25:12 crc kubenswrapper[4755]: I1124 01:25:12.906631 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f1350c1-7f2f-4c43-9029-a33ab1eb24a8-kube-api-access-qbsp6" (OuterVolumeSpecName: "kube-api-access-qbsp6") pod "1f1350c1-7f2f-4c43-9029-a33ab1eb24a8" (UID: "1f1350c1-7f2f-4c43-9029-a33ab1eb24a8"). InnerVolumeSpecName "kube-api-access-qbsp6". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:25:12 crc kubenswrapper[4755]: I1124 01:25:12.935914 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4gmv8" Nov 24 01:25:13 crc kubenswrapper[4755]: I1124 01:25:13.000094 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/85fb9244-9754-4924-b53c-51ccbf6a5220-serving-cert\") pod \"85fb9244-9754-4924-b53c-51ccbf6a5220\" (UID: \"85fb9244-9754-4924-b53c-51ccbf6a5220\") " Nov 24 01:25:13 crc kubenswrapper[4755]: I1124 01:25:13.000389 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85fb9244-9754-4924-b53c-51ccbf6a5220-config\") pod \"85fb9244-9754-4924-b53c-51ccbf6a5220\" (UID: \"85fb9244-9754-4924-b53c-51ccbf6a5220\") " Nov 24 01:25:13 crc kubenswrapper[4755]: I1124 01:25:13.000856 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/85fb9244-9754-4924-b53c-51ccbf6a5220-client-ca\") pod \"85fb9244-9754-4924-b53c-51ccbf6a5220\" (UID: \"85fb9244-9754-4924-b53c-51ccbf6a5220\") " Nov 24 01:25:13 crc kubenswrapper[4755]: I1124 01:25:13.000960 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kbjcr\" (UniqueName: \"kubernetes.io/projected/85fb9244-9754-4924-b53c-51ccbf6a5220-kube-api-access-kbjcr\") pod \"85fb9244-9754-4924-b53c-51ccbf6a5220\" (UID: \"85fb9244-9754-4924-b53c-51ccbf6a5220\") " Nov 24 01:25:13 crc kubenswrapper[4755]: I1124 01:25:13.001269 4755 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1f1350c1-7f2f-4c43-9029-a33ab1eb24a8-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 01:25:13 crc kubenswrapper[4755]: I1124 01:25:13.001284 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qbsp6\" (UniqueName: \"kubernetes.io/projected/1f1350c1-7f2f-4c43-9029-a33ab1eb24a8-kube-api-access-qbsp6\") on node \"crc\" DevicePath \"\"" Nov 24 01:25:13 crc kubenswrapper[4755]: I1124 01:25:13.002337 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/85fb9244-9754-4924-b53c-51ccbf6a5220-client-ca" (OuterVolumeSpecName: "client-ca") pod "85fb9244-9754-4924-b53c-51ccbf6a5220" (UID: "85fb9244-9754-4924-b53c-51ccbf6a5220"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:25:13 crc kubenswrapper[4755]: I1124 01:25:13.002568 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/85fb9244-9754-4924-b53c-51ccbf6a5220-config" (OuterVolumeSpecName: "config") pod "85fb9244-9754-4924-b53c-51ccbf6a5220" (UID: "85fb9244-9754-4924-b53c-51ccbf6a5220"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:25:13 crc kubenswrapper[4755]: I1124 01:25:13.007921 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85fb9244-9754-4924-b53c-51ccbf6a5220-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "85fb9244-9754-4924-b53c-51ccbf6a5220" (UID: "85fb9244-9754-4924-b53c-51ccbf6a5220"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:25:13 crc kubenswrapper[4755]: I1124 01:25:13.007945 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85fb9244-9754-4924-b53c-51ccbf6a5220-kube-api-access-kbjcr" (OuterVolumeSpecName: "kube-api-access-kbjcr") pod "85fb9244-9754-4924-b53c-51ccbf6a5220" (UID: "85fb9244-9754-4924-b53c-51ccbf6a5220"). InnerVolumeSpecName "kube-api-access-kbjcr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:25:13 crc kubenswrapper[4755]: I1124 01:25:13.102293 4755 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/85fb9244-9754-4924-b53c-51ccbf6a5220-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 01:25:13 crc kubenswrapper[4755]: I1124 01:25:13.102508 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85fb9244-9754-4924-b53c-51ccbf6a5220-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:25:13 crc kubenswrapper[4755]: I1124 01:25:13.102566 4755 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/85fb9244-9754-4924-b53c-51ccbf6a5220-client-ca\") on node \"crc\" DevicePath \"\"" Nov 24 01:25:13 crc kubenswrapper[4755]: I1124 01:25:13.102668 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kbjcr\" (UniqueName: \"kubernetes.io/projected/85fb9244-9754-4924-b53c-51ccbf6a5220-kube-api-access-kbjcr\") on node \"crc\" DevicePath \"\"" Nov 24 01:25:13 crc kubenswrapper[4755]: I1124 01:25:13.830592 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4gmv8" event={"ID":"85fb9244-9754-4924-b53c-51ccbf6a5220","Type":"ContainerDied","Data":"75ba5053a2b44c334ec6cc7cec9061199ce5e57a8486fc662bbf9272a09749a0"} Nov 24 01:25:13 crc kubenswrapper[4755]: I1124 01:25:13.830703 4755 scope.go:117] "RemoveContainer" containerID="1edaa6e0504d8818e4a5ab2990280a35c8a4a20408a381ceb10c5909fd536f25" Nov 24 01:25:13 crc kubenswrapper[4755]: I1124 01:25:13.830923 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-2qz86" Nov 24 01:25:13 crc kubenswrapper[4755]: I1124 01:25:13.830932 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4gmv8" Nov 24 01:25:13 crc kubenswrapper[4755]: I1124 01:25:13.870690 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-7f5r2" Nov 24 01:25:13 crc kubenswrapper[4755]: I1124 01:25:13.872890 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-2qz86"] Nov 24 01:25:13 crc kubenswrapper[4755]: I1124 01:25:13.877079 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-2qz86"] Nov 24 01:25:13 crc kubenswrapper[4755]: I1124 01:25:13.904209 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-4gmv8"] Nov 24 01:25:13 crc kubenswrapper[4755]: I1124 01:25:13.911385 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-4gmv8"] Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.003173 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f1350c1-7f2f-4c43-9029-a33ab1eb24a8" path="/var/lib/kubelet/pods/1f1350c1-7f2f-4c43-9029-a33ab1eb24a8/volumes" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.003990 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85fb9244-9754-4924-b53c-51ccbf6a5220" path="/var/lib/kubelet/pods/85fb9244-9754-4924-b53c-51ccbf6a5220/volumes" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.217445 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7bb7d7cd-tlw48"] Nov 24 01:25:14 crc kubenswrapper[4755]: E1124 01:25:14.218002 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f1350c1-7f2f-4c43-9029-a33ab1eb24a8" containerName="controller-manager" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.218016 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f1350c1-7f2f-4c43-9029-a33ab1eb24a8" containerName="controller-manager" Nov 24 01:25:14 crc kubenswrapper[4755]: E1124 01:25:14.218033 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d5cc1ef-48fa-42de-9301-1caa5e09d04c" containerName="registry-server" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.218041 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d5cc1ef-48fa-42de-9301-1caa5e09d04c" containerName="registry-server" Nov 24 01:25:14 crc kubenswrapper[4755]: E1124 01:25:14.218048 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85fb9244-9754-4924-b53c-51ccbf6a5220" containerName="route-controller-manager" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.218054 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="85fb9244-9754-4924-b53c-51ccbf6a5220" containerName="route-controller-manager" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.218156 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f1350c1-7f2f-4c43-9029-a33ab1eb24a8" containerName="controller-manager" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.218164 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d5cc1ef-48fa-42de-9301-1caa5e09d04c" containerName="registry-server" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.218176 4755 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="85fb9244-9754-4924-b53c-51ccbf6a5220" containerName="route-controller-manager" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.218540 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7bb7d7cd-tlw48" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.220748 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.220811 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.221034 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.221071 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.221210 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.223377 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.245518 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7bb7d7cd-tlw48"] Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.290626 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-7ffd4696f9-dqvbl"] Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.291509 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7ffd4696f9-dqvbl" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.300283 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.300547 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.300825 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.301305 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.301541 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.301793 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.305376 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.313465 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7ffd4696f9-dqvbl"] Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.317933 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b462f67d-508e-4455-9f46-6f6a1abf698b-proxy-ca-bundles\") pod \"controller-manager-7ffd4696f9-dqvbl\" (UID: \"b462f67d-508e-4455-9f46-6f6a1abf698b\") " pod="openshift-controller-manager/controller-manager-7ffd4696f9-dqvbl" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.317984 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/18380875-09df-49f8-b322-563a6486501b-config\") pod \"route-controller-manager-7bb7d7cd-tlw48\" (UID: \"18380875-09df-49f8-b322-563a6486501b\") " pod="openshift-route-controller-manager/route-controller-manager-7bb7d7cd-tlw48" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.318013 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b462f67d-508e-4455-9f46-6f6a1abf698b-config\") pod \"controller-manager-7ffd4696f9-dqvbl\" (UID: \"b462f67d-508e-4455-9f46-6f6a1abf698b\") " pod="openshift-controller-manager/controller-manager-7ffd4696f9-dqvbl" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.318033 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b462f67d-508e-4455-9f46-6f6a1abf698b-client-ca\") pod \"controller-manager-7ffd4696f9-dqvbl\" (UID: \"b462f67d-508e-4455-9f46-6f6a1abf698b\") " pod="openshift-controller-manager/controller-manager-7ffd4696f9-dqvbl" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.318081 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/b462f67d-508e-4455-9f46-6f6a1abf698b-serving-cert\") pod \"controller-manager-7ffd4696f9-dqvbl\" (UID: \"b462f67d-508e-4455-9f46-6f6a1abf698b\") " pod="openshift-controller-manager/controller-manager-7ffd4696f9-dqvbl" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.318117 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/18380875-09df-49f8-b322-563a6486501b-serving-cert\") pod \"route-controller-manager-7bb7d7cd-tlw48\" (UID: \"18380875-09df-49f8-b322-563a6486501b\") " pod="openshift-route-controller-manager/route-controller-manager-7bb7d7cd-tlw48" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.318166 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6qj5b\" (UniqueName: \"kubernetes.io/projected/18380875-09df-49f8-b322-563a6486501b-kube-api-access-6qj5b\") pod \"route-controller-manager-7bb7d7cd-tlw48\" (UID: \"18380875-09df-49f8-b322-563a6486501b\") " pod="openshift-route-controller-manager/route-controller-manager-7bb7d7cd-tlw48" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.318193 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/18380875-09df-49f8-b322-563a6486501b-client-ca\") pod \"route-controller-manager-7bb7d7cd-tlw48\" (UID: \"18380875-09df-49f8-b322-563a6486501b\") " pod="openshift-route-controller-manager/route-controller-manager-7bb7d7cd-tlw48" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.318233 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lzq49\" (UniqueName: \"kubernetes.io/projected/b462f67d-508e-4455-9f46-6f6a1abf698b-kube-api-access-lzq49\") pod \"controller-manager-7ffd4696f9-dqvbl\" (UID: \"b462f67d-508e-4455-9f46-6f6a1abf698b\") " pod="openshift-controller-manager/controller-manager-7ffd4696f9-dqvbl" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.419379 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6qj5b\" (UniqueName: \"kubernetes.io/projected/18380875-09df-49f8-b322-563a6486501b-kube-api-access-6qj5b\") pod \"route-controller-manager-7bb7d7cd-tlw48\" (UID: \"18380875-09df-49f8-b322-563a6486501b\") " pod="openshift-route-controller-manager/route-controller-manager-7bb7d7cd-tlw48" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.419419 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/18380875-09df-49f8-b322-563a6486501b-client-ca\") pod \"route-controller-manager-7bb7d7cd-tlw48\" (UID: \"18380875-09df-49f8-b322-563a6486501b\") " pod="openshift-route-controller-manager/route-controller-manager-7bb7d7cd-tlw48" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.419453 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lzq49\" (UniqueName: \"kubernetes.io/projected/b462f67d-508e-4455-9f46-6f6a1abf698b-kube-api-access-lzq49\") pod \"controller-manager-7ffd4696f9-dqvbl\" (UID: \"b462f67d-508e-4455-9f46-6f6a1abf698b\") " pod="openshift-controller-manager/controller-manager-7ffd4696f9-dqvbl" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.419479 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/b462f67d-508e-4455-9f46-6f6a1abf698b-proxy-ca-bundles\") pod \"controller-manager-7ffd4696f9-dqvbl\" (UID: \"b462f67d-508e-4455-9f46-6f6a1abf698b\") " pod="openshift-controller-manager/controller-manager-7ffd4696f9-dqvbl" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.419499 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/18380875-09df-49f8-b322-563a6486501b-config\") pod \"route-controller-manager-7bb7d7cd-tlw48\" (UID: \"18380875-09df-49f8-b322-563a6486501b\") " pod="openshift-route-controller-manager/route-controller-manager-7bb7d7cd-tlw48" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.419516 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b462f67d-508e-4455-9f46-6f6a1abf698b-config\") pod \"controller-manager-7ffd4696f9-dqvbl\" (UID: \"b462f67d-508e-4455-9f46-6f6a1abf698b\") " pod="openshift-controller-manager/controller-manager-7ffd4696f9-dqvbl" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.419530 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b462f67d-508e-4455-9f46-6f6a1abf698b-client-ca\") pod \"controller-manager-7ffd4696f9-dqvbl\" (UID: \"b462f67d-508e-4455-9f46-6f6a1abf698b\") " pod="openshift-controller-manager/controller-manager-7ffd4696f9-dqvbl" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.419559 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b462f67d-508e-4455-9f46-6f6a1abf698b-serving-cert\") pod \"controller-manager-7ffd4696f9-dqvbl\" (UID: \"b462f67d-508e-4455-9f46-6f6a1abf698b\") " pod="openshift-controller-manager/controller-manager-7ffd4696f9-dqvbl" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.419583 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/18380875-09df-49f8-b322-563a6486501b-serving-cert\") pod \"route-controller-manager-7bb7d7cd-tlw48\" (UID: \"18380875-09df-49f8-b322-563a6486501b\") " pod="openshift-route-controller-manager/route-controller-manager-7bb7d7cd-tlw48" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.420812 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/18380875-09df-49f8-b322-563a6486501b-client-ca\") pod \"route-controller-manager-7bb7d7cd-tlw48\" (UID: \"18380875-09df-49f8-b322-563a6486501b\") " pod="openshift-route-controller-manager/route-controller-manager-7bb7d7cd-tlw48" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.421016 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b462f67d-508e-4455-9f46-6f6a1abf698b-client-ca\") pod \"controller-manager-7ffd4696f9-dqvbl\" (UID: \"b462f67d-508e-4455-9f46-6f6a1abf698b\") " pod="openshift-controller-manager/controller-manager-7ffd4696f9-dqvbl" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.421183 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b462f67d-508e-4455-9f46-6f6a1abf698b-proxy-ca-bundles\") pod \"controller-manager-7ffd4696f9-dqvbl\" (UID: \"b462f67d-508e-4455-9f46-6f6a1abf698b\") " 
pod="openshift-controller-manager/controller-manager-7ffd4696f9-dqvbl" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.421453 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b462f67d-508e-4455-9f46-6f6a1abf698b-config\") pod \"controller-manager-7ffd4696f9-dqvbl\" (UID: \"b462f67d-508e-4455-9f46-6f6a1abf698b\") " pod="openshift-controller-manager/controller-manager-7ffd4696f9-dqvbl" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.422136 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/18380875-09df-49f8-b322-563a6486501b-config\") pod \"route-controller-manager-7bb7d7cd-tlw48\" (UID: \"18380875-09df-49f8-b322-563a6486501b\") " pod="openshift-route-controller-manager/route-controller-manager-7bb7d7cd-tlw48" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.425794 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b462f67d-508e-4455-9f46-6f6a1abf698b-serving-cert\") pod \"controller-manager-7ffd4696f9-dqvbl\" (UID: \"b462f67d-508e-4455-9f46-6f6a1abf698b\") " pod="openshift-controller-manager/controller-manager-7ffd4696f9-dqvbl" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.433150 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/18380875-09df-49f8-b322-563a6486501b-serving-cert\") pod \"route-controller-manager-7bb7d7cd-tlw48\" (UID: \"18380875-09df-49f8-b322-563a6486501b\") " pod="openshift-route-controller-manager/route-controller-manager-7bb7d7cd-tlw48" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.435079 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lzq49\" (UniqueName: \"kubernetes.io/projected/b462f67d-508e-4455-9f46-6f6a1abf698b-kube-api-access-lzq49\") pod \"controller-manager-7ffd4696f9-dqvbl\" (UID: \"b462f67d-508e-4455-9f46-6f6a1abf698b\") " pod="openshift-controller-manager/controller-manager-7ffd4696f9-dqvbl" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.437474 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6qj5b\" (UniqueName: \"kubernetes.io/projected/18380875-09df-49f8-b322-563a6486501b-kube-api-access-6qj5b\") pod \"route-controller-manager-7bb7d7cd-tlw48\" (UID: \"18380875-09df-49f8-b322-563a6486501b\") " pod="openshift-route-controller-manager/route-controller-manager-7bb7d7cd-tlw48" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.574483 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7bb7d7cd-tlw48" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.619695 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7ffd4696f9-dqvbl" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.782269 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7bb7d7cd-tlw48"] Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.837934 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7bb7d7cd-tlw48" event={"ID":"18380875-09df-49f8-b322-563a6486501b","Type":"ContainerStarted","Data":"4d04659f89fbd2143596c3784d68630d97741e641559179a3d99210fa66b1d1f"} Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.966563 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/7b7d855816ca9e76c7c9245ff70832f330b0e0531ae10de757037d494dxbhbm"] Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.968059 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/7b7d855816ca9e76c7c9245ff70832f330b0e0531ae10de757037d494dxbhbm" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.970719 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-6vzqq" Nov 24 01:25:14 crc kubenswrapper[4755]: I1124 01:25:14.971199 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/7b7d855816ca9e76c7c9245ff70832f330b0e0531ae10de757037d494dxbhbm"] Nov 24 01:25:15 crc kubenswrapper[4755]: I1124 01:25:15.026086 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/17800bdb-a186-4da5-aa5f-49925f2c6b5a-util\") pod \"7b7d855816ca9e76c7c9245ff70832f330b0e0531ae10de757037d494dxbhbm\" (UID: \"17800bdb-a186-4da5-aa5f-49925f2c6b5a\") " pod="openstack-operators/7b7d855816ca9e76c7c9245ff70832f330b0e0531ae10de757037d494dxbhbm" Nov 24 01:25:15 crc kubenswrapper[4755]: I1124 01:25:15.026162 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gc5r9\" (UniqueName: \"kubernetes.io/projected/17800bdb-a186-4da5-aa5f-49925f2c6b5a-kube-api-access-gc5r9\") pod \"7b7d855816ca9e76c7c9245ff70832f330b0e0531ae10de757037d494dxbhbm\" (UID: \"17800bdb-a186-4da5-aa5f-49925f2c6b5a\") " pod="openstack-operators/7b7d855816ca9e76c7c9245ff70832f330b0e0531ae10de757037d494dxbhbm" Nov 24 01:25:15 crc kubenswrapper[4755]: I1124 01:25:15.026293 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/17800bdb-a186-4da5-aa5f-49925f2c6b5a-bundle\") pod \"7b7d855816ca9e76c7c9245ff70832f330b0e0531ae10de757037d494dxbhbm\" (UID: \"17800bdb-a186-4da5-aa5f-49925f2c6b5a\") " pod="openstack-operators/7b7d855816ca9e76c7c9245ff70832f330b0e0531ae10de757037d494dxbhbm" Nov 24 01:25:15 crc kubenswrapper[4755]: I1124 01:25:15.038220 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7ffd4696f9-dqvbl"] Nov 24 01:25:15 crc kubenswrapper[4755]: I1124 01:25:15.127414 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/17800bdb-a186-4da5-aa5f-49925f2c6b5a-bundle\") pod \"7b7d855816ca9e76c7c9245ff70832f330b0e0531ae10de757037d494dxbhbm\" (UID: \"17800bdb-a186-4da5-aa5f-49925f2c6b5a\") " pod="openstack-operators/7b7d855816ca9e76c7c9245ff70832f330b0e0531ae10de757037d494dxbhbm" 
Nov 24 01:25:15 crc kubenswrapper[4755]: I1124 01:25:15.127513 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/17800bdb-a186-4da5-aa5f-49925f2c6b5a-util\") pod \"7b7d855816ca9e76c7c9245ff70832f330b0e0531ae10de757037d494dxbhbm\" (UID: \"17800bdb-a186-4da5-aa5f-49925f2c6b5a\") " pod="openstack-operators/7b7d855816ca9e76c7c9245ff70832f330b0e0531ae10de757037d494dxbhbm" Nov 24 01:25:15 crc kubenswrapper[4755]: I1124 01:25:15.127576 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gc5r9\" (UniqueName: \"kubernetes.io/projected/17800bdb-a186-4da5-aa5f-49925f2c6b5a-kube-api-access-gc5r9\") pod \"7b7d855816ca9e76c7c9245ff70832f330b0e0531ae10de757037d494dxbhbm\" (UID: \"17800bdb-a186-4da5-aa5f-49925f2c6b5a\") " pod="openstack-operators/7b7d855816ca9e76c7c9245ff70832f330b0e0531ae10de757037d494dxbhbm" Nov 24 01:25:15 crc kubenswrapper[4755]: I1124 01:25:15.128619 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/17800bdb-a186-4da5-aa5f-49925f2c6b5a-bundle\") pod \"7b7d855816ca9e76c7c9245ff70832f330b0e0531ae10de757037d494dxbhbm\" (UID: \"17800bdb-a186-4da5-aa5f-49925f2c6b5a\") " pod="openstack-operators/7b7d855816ca9e76c7c9245ff70832f330b0e0531ae10de757037d494dxbhbm" Nov 24 01:25:15 crc kubenswrapper[4755]: I1124 01:25:15.128732 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/17800bdb-a186-4da5-aa5f-49925f2c6b5a-util\") pod \"7b7d855816ca9e76c7c9245ff70832f330b0e0531ae10de757037d494dxbhbm\" (UID: \"17800bdb-a186-4da5-aa5f-49925f2c6b5a\") " pod="openstack-operators/7b7d855816ca9e76c7c9245ff70832f330b0e0531ae10de757037d494dxbhbm" Nov 24 01:25:15 crc kubenswrapper[4755]: I1124 01:25:15.147390 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gc5r9\" (UniqueName: \"kubernetes.io/projected/17800bdb-a186-4da5-aa5f-49925f2c6b5a-kube-api-access-gc5r9\") pod \"7b7d855816ca9e76c7c9245ff70832f330b0e0531ae10de757037d494dxbhbm\" (UID: \"17800bdb-a186-4da5-aa5f-49925f2c6b5a\") " pod="openstack-operators/7b7d855816ca9e76c7c9245ff70832f330b0e0531ae10de757037d494dxbhbm" Nov 24 01:25:15 crc kubenswrapper[4755]: I1124 01:25:15.294080 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/7b7d855816ca9e76c7c9245ff70832f330b0e0531ae10de757037d494dxbhbm" Nov 24 01:25:15 crc kubenswrapper[4755]: I1124 01:25:15.781471 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/7b7d855816ca9e76c7c9245ff70832f330b0e0531ae10de757037d494dxbhbm"] Nov 24 01:25:15 crc kubenswrapper[4755]: I1124 01:25:15.845905 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7bb7d7cd-tlw48" event={"ID":"18380875-09df-49f8-b322-563a6486501b","Type":"ContainerStarted","Data":"3c69b6151e10d2c2c4c30d8a56412efe0b12acbc1396d6a1c4d6f77243d04864"} Nov 24 01:25:15 crc kubenswrapper[4755]: I1124 01:25:15.846167 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-7bb7d7cd-tlw48" Nov 24 01:25:15 crc kubenswrapper[4755]: I1124 01:25:15.848875 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7b7d855816ca9e76c7c9245ff70832f330b0e0531ae10de757037d494dxbhbm" event={"ID":"17800bdb-a186-4da5-aa5f-49925f2c6b5a","Type":"ContainerStarted","Data":"5a674158102364e40c3c397fa4e4b5c28e1faa44fddc966a527ceca29c5c2d94"} Nov 24 01:25:15 crc kubenswrapper[4755]: I1124 01:25:15.852160 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7ffd4696f9-dqvbl" event={"ID":"b462f67d-508e-4455-9f46-6f6a1abf698b","Type":"ContainerStarted","Data":"5e6376c07206a4be82d6249b01542a2a8151fe986eda62e94684ea1d50b1b911"} Nov 24 01:25:15 crc kubenswrapper[4755]: I1124 01:25:15.852201 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7ffd4696f9-dqvbl" event={"ID":"b462f67d-508e-4455-9f46-6f6a1abf698b","Type":"ContainerStarted","Data":"522d81c77fc2f30aaa3e3b0cf3aca92b9a971b693984edb984248082b073c5b1"} Nov 24 01:25:15 crc kubenswrapper[4755]: I1124 01:25:15.852999 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-7bb7d7cd-tlw48" Nov 24 01:25:15 crc kubenswrapper[4755]: I1124 01:25:15.880131 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7bb7d7cd-tlw48" podStartSLOduration=1.8801126479999999 podStartE2EDuration="1.880112648s" podCreationTimestamp="2025-11-24 01:25:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:25:15.877764846 +0000 UTC m=+740.563830397" watchObservedRunningTime="2025-11-24 01:25:15.880112648 +0000 UTC m=+740.566178159" Nov 24 01:25:15 crc kubenswrapper[4755]: I1124 01:25:15.947059 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-7ffd4696f9-dqvbl" podStartSLOduration=3.947039276 podStartE2EDuration="3.947039276s" podCreationTimestamp="2025-11-24 01:25:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:25:15.943969855 +0000 UTC m=+740.630035366" watchObservedRunningTime="2025-11-24 01:25:15.947039276 +0000 UTC m=+740.633104777" Nov 24 01:25:16 crc kubenswrapper[4755]: I1124 01:25:16.861791 4755 generic.go:334] "Generic (PLEG): container finished" podID="17800bdb-a186-4da5-aa5f-49925f2c6b5a" 
containerID="6bb8563c27f9aec198a070e2fbdbe1121a285a6b1198c1491835ad4f3f649814" exitCode=0 Nov 24 01:25:16 crc kubenswrapper[4755]: I1124 01:25:16.861894 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7b7d855816ca9e76c7c9245ff70832f330b0e0531ae10de757037d494dxbhbm" event={"ID":"17800bdb-a186-4da5-aa5f-49925f2c6b5a","Type":"ContainerDied","Data":"6bb8563c27f9aec198a070e2fbdbe1121a285a6b1198c1491835ad4f3f649814"} Nov 24 01:25:16 crc kubenswrapper[4755]: I1124 01:25:16.862797 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7ffd4696f9-dqvbl" Nov 24 01:25:16 crc kubenswrapper[4755]: I1124 01:25:16.870437 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-7ffd4696f9-dqvbl" Nov 24 01:25:17 crc kubenswrapper[4755]: I1124 01:25:17.869935 4755 generic.go:334] "Generic (PLEG): container finished" podID="17800bdb-a186-4da5-aa5f-49925f2c6b5a" containerID="b49d2f0702dab901ce3e2cc7a0635d84c9181458a422d80509c150f841289fc5" exitCode=0 Nov 24 01:25:17 crc kubenswrapper[4755]: I1124 01:25:17.870045 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7b7d855816ca9e76c7c9245ff70832f330b0e0531ae10de757037d494dxbhbm" event={"ID":"17800bdb-a186-4da5-aa5f-49925f2c6b5a","Type":"ContainerDied","Data":"b49d2f0702dab901ce3e2cc7a0635d84c9181458a422d80509c150f841289fc5"} Nov 24 01:25:18 crc kubenswrapper[4755]: I1124 01:25:18.881528 4755 generic.go:334] "Generic (PLEG): container finished" podID="17800bdb-a186-4da5-aa5f-49925f2c6b5a" containerID="8629149b4b9024a28dbe43bd359a9383135ad284870f587d8a1c78e375610d3e" exitCode=0 Nov 24 01:25:18 crc kubenswrapper[4755]: I1124 01:25:18.881639 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7b7d855816ca9e76c7c9245ff70832f330b0e0531ae10de757037d494dxbhbm" event={"ID":"17800bdb-a186-4da5-aa5f-49925f2c6b5a","Type":"ContainerDied","Data":"8629149b4b9024a28dbe43bd359a9383135ad284870f587d8a1c78e375610d3e"} Nov 24 01:25:20 crc kubenswrapper[4755]: I1124 01:25:20.196354 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/7b7d855816ca9e76c7c9245ff70832f330b0e0531ae10de757037d494dxbhbm" Nov 24 01:25:20 crc kubenswrapper[4755]: I1124 01:25:20.301845 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/17800bdb-a186-4da5-aa5f-49925f2c6b5a-util\") pod \"17800bdb-a186-4da5-aa5f-49925f2c6b5a\" (UID: \"17800bdb-a186-4da5-aa5f-49925f2c6b5a\") " Nov 24 01:25:20 crc kubenswrapper[4755]: I1124 01:25:20.302509 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gc5r9\" (UniqueName: \"kubernetes.io/projected/17800bdb-a186-4da5-aa5f-49925f2c6b5a-kube-api-access-gc5r9\") pod \"17800bdb-a186-4da5-aa5f-49925f2c6b5a\" (UID: \"17800bdb-a186-4da5-aa5f-49925f2c6b5a\") " Nov 24 01:25:20 crc kubenswrapper[4755]: I1124 01:25:20.302637 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/17800bdb-a186-4da5-aa5f-49925f2c6b5a-bundle\") pod \"17800bdb-a186-4da5-aa5f-49925f2c6b5a\" (UID: \"17800bdb-a186-4da5-aa5f-49925f2c6b5a\") " Nov 24 01:25:20 crc kubenswrapper[4755]: I1124 01:25:20.303269 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/17800bdb-a186-4da5-aa5f-49925f2c6b5a-bundle" (OuterVolumeSpecName: "bundle") pod "17800bdb-a186-4da5-aa5f-49925f2c6b5a" (UID: "17800bdb-a186-4da5-aa5f-49925f2c6b5a"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:25:20 crc kubenswrapper[4755]: I1124 01:25:20.307824 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17800bdb-a186-4da5-aa5f-49925f2c6b5a-kube-api-access-gc5r9" (OuterVolumeSpecName: "kube-api-access-gc5r9") pod "17800bdb-a186-4da5-aa5f-49925f2c6b5a" (UID: "17800bdb-a186-4da5-aa5f-49925f2c6b5a"). InnerVolumeSpecName "kube-api-access-gc5r9". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:25:20 crc kubenswrapper[4755]: I1124 01:25:20.315480 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/17800bdb-a186-4da5-aa5f-49925f2c6b5a-util" (OuterVolumeSpecName: "util") pod "17800bdb-a186-4da5-aa5f-49925f2c6b5a" (UID: "17800bdb-a186-4da5-aa5f-49925f2c6b5a"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:25:20 crc kubenswrapper[4755]: I1124 01:25:20.403757 4755 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/17800bdb-a186-4da5-aa5f-49925f2c6b5a-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:25:20 crc kubenswrapper[4755]: I1124 01:25:20.403813 4755 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/17800bdb-a186-4da5-aa5f-49925f2c6b5a-util\") on node \"crc\" DevicePath \"\"" Nov 24 01:25:20 crc kubenswrapper[4755]: I1124 01:25:20.403822 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gc5r9\" (UniqueName: \"kubernetes.io/projected/17800bdb-a186-4da5-aa5f-49925f2c6b5a-kube-api-access-gc5r9\") on node \"crc\" DevicePath \"\"" Nov 24 01:25:20 crc kubenswrapper[4755]: I1124 01:25:20.826391 4755 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Nov 24 01:25:20 crc kubenswrapper[4755]: I1124 01:25:20.896172 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7b7d855816ca9e76c7c9245ff70832f330b0e0531ae10de757037d494dxbhbm" event={"ID":"17800bdb-a186-4da5-aa5f-49925f2c6b5a","Type":"ContainerDied","Data":"5a674158102364e40c3c397fa4e4b5c28e1faa44fddc966a527ceca29c5c2d94"} Nov 24 01:25:20 crc kubenswrapper[4755]: I1124 01:25:20.896220 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5a674158102364e40c3c397fa4e4b5c28e1faa44fddc966a527ceca29c5c2d94" Nov 24 01:25:20 crc kubenswrapper[4755]: I1124 01:25:20.896260 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/7b7d855816ca9e76c7c9245ff70832f330b0e0531ae10de757037d494dxbhbm" Nov 24 01:25:27 crc kubenswrapper[4755]: I1124 01:25:27.539361 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-95bdd885d-rpv4q"] Nov 24 01:25:27 crc kubenswrapper[4755]: E1124 01:25:27.540155 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17800bdb-a186-4da5-aa5f-49925f2c6b5a" containerName="util" Nov 24 01:25:27 crc kubenswrapper[4755]: I1124 01:25:27.540170 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="17800bdb-a186-4da5-aa5f-49925f2c6b5a" containerName="util" Nov 24 01:25:27 crc kubenswrapper[4755]: E1124 01:25:27.540187 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17800bdb-a186-4da5-aa5f-49925f2c6b5a" containerName="pull" Nov 24 01:25:27 crc kubenswrapper[4755]: I1124 01:25:27.540194 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="17800bdb-a186-4da5-aa5f-49925f2c6b5a" containerName="pull" Nov 24 01:25:27 crc kubenswrapper[4755]: E1124 01:25:27.540202 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17800bdb-a186-4da5-aa5f-49925f2c6b5a" containerName="extract" Nov 24 01:25:27 crc kubenswrapper[4755]: I1124 01:25:27.540210 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="17800bdb-a186-4da5-aa5f-49925f2c6b5a" containerName="extract" Nov 24 01:25:27 crc kubenswrapper[4755]: I1124 01:25:27.540346 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="17800bdb-a186-4da5-aa5f-49925f2c6b5a" containerName="extract" Nov 24 01:25:27 crc kubenswrapper[4755]: I1124 01:25:27.541104 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-95bdd885d-rpv4q" Nov 24 01:25:27 crc kubenswrapper[4755]: I1124 01:25:27.542760 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-dw9jr" Nov 24 01:25:27 crc kubenswrapper[4755]: I1124 01:25:27.576869 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-95bdd885d-rpv4q"] Nov 24 01:25:27 crc kubenswrapper[4755]: I1124 01:25:27.699638 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7gkql\" (UniqueName: \"kubernetes.io/projected/3730c324-139f-4560-ac12-c8e0595a58cb-kube-api-access-7gkql\") pod \"openstack-operator-controller-operator-95bdd885d-rpv4q\" (UID: \"3730c324-139f-4560-ac12-c8e0595a58cb\") " pod="openstack-operators/openstack-operator-controller-operator-95bdd885d-rpv4q" Nov 24 01:25:27 crc kubenswrapper[4755]: I1124 01:25:27.800975 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7gkql\" (UniqueName: \"kubernetes.io/projected/3730c324-139f-4560-ac12-c8e0595a58cb-kube-api-access-7gkql\") pod \"openstack-operator-controller-operator-95bdd885d-rpv4q\" (UID: \"3730c324-139f-4560-ac12-c8e0595a58cb\") " pod="openstack-operators/openstack-operator-controller-operator-95bdd885d-rpv4q" Nov 24 01:25:27 crc kubenswrapper[4755]: I1124 01:25:27.820168 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7gkql\" (UniqueName: \"kubernetes.io/projected/3730c324-139f-4560-ac12-c8e0595a58cb-kube-api-access-7gkql\") pod \"openstack-operator-controller-operator-95bdd885d-rpv4q\" (UID: \"3730c324-139f-4560-ac12-c8e0595a58cb\") " pod="openstack-operators/openstack-operator-controller-operator-95bdd885d-rpv4q" Nov 24 01:25:27 crc kubenswrapper[4755]: I1124 01:25:27.857345 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-95bdd885d-rpv4q" Nov 24 01:25:28 crc kubenswrapper[4755]: I1124 01:25:28.343104 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-95bdd885d-rpv4q"] Nov 24 01:25:28 crc kubenswrapper[4755]: I1124 01:25:28.951051 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-95bdd885d-rpv4q" event={"ID":"3730c324-139f-4560-ac12-c8e0595a58cb","Type":"ContainerStarted","Data":"994f984d51a3598b6009061e2347224ed5a92e489c8d4c83ba9dbde50c0c9a9c"} Nov 24 01:25:31 crc kubenswrapper[4755]: I1124 01:25:31.968093 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-95bdd885d-rpv4q" event={"ID":"3730c324-139f-4560-ac12-c8e0595a58cb","Type":"ContainerStarted","Data":"2236070eb6dc459476cc79c6cb2cb461bbe288c81443bd4dfe8ba380569aa640"} Nov 24 01:25:32 crc kubenswrapper[4755]: I1124 01:25:32.854885 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-7l8nl"] Nov 24 01:25:32 crc kubenswrapper[4755]: I1124 01:25:32.856091 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7l8nl" Nov 24 01:25:32 crc kubenswrapper[4755]: I1124 01:25:32.866939 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xf6gp\" (UniqueName: \"kubernetes.io/projected/f92be83e-5b90-41a9-b669-2ecb2639f68a-kube-api-access-xf6gp\") pod \"redhat-marketplace-7l8nl\" (UID: \"f92be83e-5b90-41a9-b669-2ecb2639f68a\") " pod="openshift-marketplace/redhat-marketplace-7l8nl" Nov 24 01:25:32 crc kubenswrapper[4755]: I1124 01:25:32.866973 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f92be83e-5b90-41a9-b669-2ecb2639f68a-utilities\") pod \"redhat-marketplace-7l8nl\" (UID: \"f92be83e-5b90-41a9-b669-2ecb2639f68a\") " pod="openshift-marketplace/redhat-marketplace-7l8nl" Nov 24 01:25:32 crc kubenswrapper[4755]: I1124 01:25:32.867042 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f92be83e-5b90-41a9-b669-2ecb2639f68a-catalog-content\") pod \"redhat-marketplace-7l8nl\" (UID: \"f92be83e-5b90-41a9-b669-2ecb2639f68a\") " pod="openshift-marketplace/redhat-marketplace-7l8nl" Nov 24 01:25:32 crc kubenswrapper[4755]: I1124 01:25:32.877059 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7l8nl"] Nov 24 01:25:32 crc kubenswrapper[4755]: I1124 01:25:32.968028 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f92be83e-5b90-41a9-b669-2ecb2639f68a-catalog-content\") pod \"redhat-marketplace-7l8nl\" (UID: \"f92be83e-5b90-41a9-b669-2ecb2639f68a\") " pod="openshift-marketplace/redhat-marketplace-7l8nl" Nov 24 01:25:32 crc kubenswrapper[4755]: I1124 01:25:32.968094 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xf6gp\" (UniqueName: \"kubernetes.io/projected/f92be83e-5b90-41a9-b669-2ecb2639f68a-kube-api-access-xf6gp\") pod \"redhat-marketplace-7l8nl\" (UID: \"f92be83e-5b90-41a9-b669-2ecb2639f68a\") " pod="openshift-marketplace/redhat-marketplace-7l8nl" Nov 24 01:25:32 crc kubenswrapper[4755]: I1124 01:25:32.968115 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f92be83e-5b90-41a9-b669-2ecb2639f68a-utilities\") pod \"redhat-marketplace-7l8nl\" (UID: \"f92be83e-5b90-41a9-b669-2ecb2639f68a\") " pod="openshift-marketplace/redhat-marketplace-7l8nl" Nov 24 01:25:32 crc kubenswrapper[4755]: I1124 01:25:32.968811 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f92be83e-5b90-41a9-b669-2ecb2639f68a-catalog-content\") pod \"redhat-marketplace-7l8nl\" (UID: \"f92be83e-5b90-41a9-b669-2ecb2639f68a\") " pod="openshift-marketplace/redhat-marketplace-7l8nl" Nov 24 01:25:32 crc kubenswrapper[4755]: I1124 01:25:32.969015 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f92be83e-5b90-41a9-b669-2ecb2639f68a-utilities\") pod \"redhat-marketplace-7l8nl\" (UID: \"f92be83e-5b90-41a9-b669-2ecb2639f68a\") " pod="openshift-marketplace/redhat-marketplace-7l8nl" Nov 24 01:25:32 crc kubenswrapper[4755]: I1124 01:25:32.989125 4755 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-xf6gp\" (UniqueName: \"kubernetes.io/projected/f92be83e-5b90-41a9-b669-2ecb2639f68a-kube-api-access-xf6gp\") pod \"redhat-marketplace-7l8nl\" (UID: \"f92be83e-5b90-41a9-b669-2ecb2639f68a\") " pod="openshift-marketplace/redhat-marketplace-7l8nl" Nov 24 01:25:33 crc kubenswrapper[4755]: I1124 01:25:33.177838 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7l8nl" Nov 24 01:25:33 crc kubenswrapper[4755]: I1124 01:25:33.295595 4755 patch_prober.go:28] interesting pod/machine-config-daemon-h8xzm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 01:25:33 crc kubenswrapper[4755]: I1124 01:25:33.295679 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 01:25:33 crc kubenswrapper[4755]: I1124 01:25:33.295736 4755 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" Nov 24 01:25:33 crc kubenswrapper[4755]: I1124 01:25:33.296767 4755 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"31c763569028cdcbeab7620c7ace03dd90f3c86c98eb54fc2ca5ba33d792fb99"} pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 24 01:25:33 crc kubenswrapper[4755]: I1124 01:25:33.296841 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" containerID="cri-o://31c763569028cdcbeab7620c7ace03dd90f3c86c98eb54fc2ca5ba33d792fb99" gracePeriod=600 Nov 24 01:25:33 crc kubenswrapper[4755]: I1124 01:25:33.996374 4755 generic.go:334] "Generic (PLEG): container finished" podID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerID="31c763569028cdcbeab7620c7ace03dd90f3c86c98eb54fc2ca5ba33d792fb99" exitCode=0 Nov 24 01:25:34 crc kubenswrapper[4755]: I1124 01:25:34.012208 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" event={"ID":"b1962128-02a0-46c3-82c2-5055c2aed0b9","Type":"ContainerDied","Data":"31c763569028cdcbeab7620c7ace03dd90f3c86c98eb54fc2ca5ba33d792fb99"} Nov 24 01:25:34 crc kubenswrapper[4755]: I1124 01:25:34.012263 4755 scope.go:117] "RemoveContainer" containerID="587d82f2c33616f73d21402a931fc68cfeb5ed9c5e1ee08ba40d1b70c50f1cdd" Nov 24 01:25:34 crc kubenswrapper[4755]: I1124 01:25:34.237232 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7l8nl"] Nov 24 01:25:34 crc kubenswrapper[4755]: W1124 01:25:34.244015 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf92be83e_5b90_41a9_b669_2ecb2639f68a.slice/crio-b92282e8bd5f5517bc67f3c8b0fac6633e7b8ef651e6857c2cf305bc78addebe WatchSource:0}: Error finding container 
b92282e8bd5f5517bc67f3c8b0fac6633e7b8ef651e6857c2cf305bc78addebe: Status 404 returned error can't find the container with id b92282e8bd5f5517bc67f3c8b0fac6633e7b8ef651e6857c2cf305bc78addebe Nov 24 01:25:35 crc kubenswrapper[4755]: I1124 01:25:35.005591 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-95bdd885d-rpv4q" event={"ID":"3730c324-139f-4560-ac12-c8e0595a58cb","Type":"ContainerStarted","Data":"eead7446b2c5546e21f81e53358915fc2085b027e7246e951167d4bd3cc6abf0"} Nov 24 01:25:35 crc kubenswrapper[4755]: I1124 01:25:35.005978 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-95bdd885d-rpv4q" Nov 24 01:25:35 crc kubenswrapper[4755]: I1124 01:25:35.009138 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" event={"ID":"b1962128-02a0-46c3-82c2-5055c2aed0b9","Type":"ContainerStarted","Data":"d1576ec75c38e5c634d28cddad8ee45995a487ee45005883f5a41207a6c2c9de"} Nov 24 01:25:35 crc kubenswrapper[4755]: I1124 01:25:35.011235 4755 generic.go:334] "Generic (PLEG): container finished" podID="f92be83e-5b90-41a9-b669-2ecb2639f68a" containerID="6536c00e4f755a90daa3b7621a8a5242e19ad12f039262a9a38e7a1321c0c82f" exitCode=0 Nov 24 01:25:35 crc kubenswrapper[4755]: I1124 01:25:35.011294 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7l8nl" event={"ID":"f92be83e-5b90-41a9-b669-2ecb2639f68a","Type":"ContainerDied","Data":"6536c00e4f755a90daa3b7621a8a5242e19ad12f039262a9a38e7a1321c0c82f"} Nov 24 01:25:35 crc kubenswrapper[4755]: I1124 01:25:35.011325 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7l8nl" event={"ID":"f92be83e-5b90-41a9-b669-2ecb2639f68a","Type":"ContainerStarted","Data":"b92282e8bd5f5517bc67f3c8b0fac6633e7b8ef651e6857c2cf305bc78addebe"} Nov 24 01:25:35 crc kubenswrapper[4755]: I1124 01:25:35.060186 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-95bdd885d-rpv4q" podStartSLOduration=2.547096913 podStartE2EDuration="8.06015576s" podCreationTimestamp="2025-11-24 01:25:27 +0000 UTC" firstStartedPulling="2025-11-24 01:25:28.341475719 +0000 UTC m=+753.027541220" lastFinishedPulling="2025-11-24 01:25:33.854534566 +0000 UTC m=+758.540600067" observedRunningTime="2025-11-24 01:25:35.04528795 +0000 UTC m=+759.731353471" watchObservedRunningTime="2025-11-24 01:25:35.06015576 +0000 UTC m=+759.746221261" Nov 24 01:25:36 crc kubenswrapper[4755]: I1124 01:25:36.027574 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-95bdd885d-rpv4q" Nov 24 01:25:37 crc kubenswrapper[4755]: I1124 01:25:37.032803 4755 generic.go:334] "Generic (PLEG): container finished" podID="f92be83e-5b90-41a9-b669-2ecb2639f68a" containerID="b583a7b74bd343db150dc76d1d9b82d335e9720bcfa01bd71802536ca45819d1" exitCode=0 Nov 24 01:25:37 crc kubenswrapper[4755]: I1124 01:25:37.032904 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7l8nl" event={"ID":"f92be83e-5b90-41a9-b669-2ecb2639f68a","Type":"ContainerDied","Data":"b583a7b74bd343db150dc76d1d9b82d335e9720bcfa01bd71802536ca45819d1"} Nov 24 01:25:37 crc kubenswrapper[4755]: I1124 01:25:37.838594 4755 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/redhat-operators-mf98s"] Nov 24 01:25:37 crc kubenswrapper[4755]: I1124 01:25:37.840126 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mf98s" Nov 24 01:25:37 crc kubenswrapper[4755]: I1124 01:25:37.856547 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mf98s"] Nov 24 01:25:38 crc kubenswrapper[4755]: I1124 01:25:38.039833 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81c4cadf-6ecf-43b3-88d2-9194129a34e9-utilities\") pod \"redhat-operators-mf98s\" (UID: \"81c4cadf-6ecf-43b3-88d2-9194129a34e9\") " pod="openshift-marketplace/redhat-operators-mf98s" Nov 24 01:25:38 crc kubenswrapper[4755]: I1124 01:25:38.040852 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mj6f8\" (UniqueName: \"kubernetes.io/projected/81c4cadf-6ecf-43b3-88d2-9194129a34e9-kube-api-access-mj6f8\") pod \"redhat-operators-mf98s\" (UID: \"81c4cadf-6ecf-43b3-88d2-9194129a34e9\") " pod="openshift-marketplace/redhat-operators-mf98s" Nov 24 01:25:38 crc kubenswrapper[4755]: I1124 01:25:38.041217 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81c4cadf-6ecf-43b3-88d2-9194129a34e9-catalog-content\") pod \"redhat-operators-mf98s\" (UID: \"81c4cadf-6ecf-43b3-88d2-9194129a34e9\") " pod="openshift-marketplace/redhat-operators-mf98s" Nov 24 01:25:38 crc kubenswrapper[4755]: I1124 01:25:38.042741 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7l8nl" event={"ID":"f92be83e-5b90-41a9-b669-2ecb2639f68a","Type":"ContainerStarted","Data":"de0bbecd33709794965bb4a2c3258382a3a45b9ad5251f3c405ad3cefeeae5b0"} Nov 24 01:25:38 crc kubenswrapper[4755]: I1124 01:25:38.063874 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-7l8nl" podStartSLOduration=3.539571726 podStartE2EDuration="6.063855033s" podCreationTimestamp="2025-11-24 01:25:32 +0000 UTC" firstStartedPulling="2025-11-24 01:25:35.013527465 +0000 UTC m=+759.699592976" lastFinishedPulling="2025-11-24 01:25:37.537810782 +0000 UTC m=+762.223876283" observedRunningTime="2025-11-24 01:25:38.061920912 +0000 UTC m=+762.747986413" watchObservedRunningTime="2025-11-24 01:25:38.063855033 +0000 UTC m=+762.749920544" Nov 24 01:25:38 crc kubenswrapper[4755]: I1124 01:25:38.142315 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81c4cadf-6ecf-43b3-88d2-9194129a34e9-catalog-content\") pod \"redhat-operators-mf98s\" (UID: \"81c4cadf-6ecf-43b3-88d2-9194129a34e9\") " pod="openshift-marketplace/redhat-operators-mf98s" Nov 24 01:25:38 crc kubenswrapper[4755]: I1124 01:25:38.142398 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81c4cadf-6ecf-43b3-88d2-9194129a34e9-utilities\") pod \"redhat-operators-mf98s\" (UID: \"81c4cadf-6ecf-43b3-88d2-9194129a34e9\") " pod="openshift-marketplace/redhat-operators-mf98s" Nov 24 01:25:38 crc kubenswrapper[4755]: I1124 01:25:38.142424 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mj6f8\" (UniqueName: 
\"kubernetes.io/projected/81c4cadf-6ecf-43b3-88d2-9194129a34e9-kube-api-access-mj6f8\") pod \"redhat-operators-mf98s\" (UID: \"81c4cadf-6ecf-43b3-88d2-9194129a34e9\") " pod="openshift-marketplace/redhat-operators-mf98s" Nov 24 01:25:38 crc kubenswrapper[4755]: I1124 01:25:38.143369 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81c4cadf-6ecf-43b3-88d2-9194129a34e9-catalog-content\") pod \"redhat-operators-mf98s\" (UID: \"81c4cadf-6ecf-43b3-88d2-9194129a34e9\") " pod="openshift-marketplace/redhat-operators-mf98s" Nov 24 01:25:38 crc kubenswrapper[4755]: I1124 01:25:38.143686 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81c4cadf-6ecf-43b3-88d2-9194129a34e9-utilities\") pod \"redhat-operators-mf98s\" (UID: \"81c4cadf-6ecf-43b3-88d2-9194129a34e9\") " pod="openshift-marketplace/redhat-operators-mf98s" Nov 24 01:25:38 crc kubenswrapper[4755]: I1124 01:25:38.162310 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mj6f8\" (UniqueName: \"kubernetes.io/projected/81c4cadf-6ecf-43b3-88d2-9194129a34e9-kube-api-access-mj6f8\") pod \"redhat-operators-mf98s\" (UID: \"81c4cadf-6ecf-43b3-88d2-9194129a34e9\") " pod="openshift-marketplace/redhat-operators-mf98s" Nov 24 01:25:38 crc kubenswrapper[4755]: I1124 01:25:38.459382 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mf98s" Nov 24 01:25:38 crc kubenswrapper[4755]: I1124 01:25:38.893450 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mf98s"] Nov 24 01:25:38 crc kubenswrapper[4755]: W1124 01:25:38.899372 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod81c4cadf_6ecf_43b3_88d2_9194129a34e9.slice/crio-5fa9314e43c9b3b7566c27c8ef96e5316919913ac1729ed46b0feba3cc76eb47 WatchSource:0}: Error finding container 5fa9314e43c9b3b7566c27c8ef96e5316919913ac1729ed46b0feba3cc76eb47: Status 404 returned error can't find the container with id 5fa9314e43c9b3b7566c27c8ef96e5316919913ac1729ed46b0feba3cc76eb47 Nov 24 01:25:39 crc kubenswrapper[4755]: I1124 01:25:39.050057 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mf98s" event={"ID":"81c4cadf-6ecf-43b3-88d2-9194129a34e9","Type":"ContainerStarted","Data":"a8c8dd811f7cb8151cdc29e9a5769d615f87c858007038c47699e53720a12c80"} Nov 24 01:25:39 crc kubenswrapper[4755]: I1124 01:25:39.050116 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mf98s" event={"ID":"81c4cadf-6ecf-43b3-88d2-9194129a34e9","Type":"ContainerStarted","Data":"5fa9314e43c9b3b7566c27c8ef96e5316919913ac1729ed46b0feba3cc76eb47"} Nov 24 01:25:40 crc kubenswrapper[4755]: I1124 01:25:40.055846 4755 generic.go:334] "Generic (PLEG): container finished" podID="81c4cadf-6ecf-43b3-88d2-9194129a34e9" containerID="a8c8dd811f7cb8151cdc29e9a5769d615f87c858007038c47699e53720a12c80" exitCode=0 Nov 24 01:25:40 crc kubenswrapper[4755]: I1124 01:25:40.055888 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mf98s" event={"ID":"81c4cadf-6ecf-43b3-88d2-9194129a34e9","Type":"ContainerDied","Data":"a8c8dd811f7cb8151cdc29e9a5769d615f87c858007038c47699e53720a12c80"} Nov 24 01:25:41 crc kubenswrapper[4755]: I1124 01:25:41.063217 4755 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mf98s" event={"ID":"81c4cadf-6ecf-43b3-88d2-9194129a34e9","Type":"ContainerStarted","Data":"25ad85ae1511ef84637aca16ba383e90f37c3cd779f2b29cdd3a607fc4896553"} Nov 24 01:25:42 crc kubenswrapper[4755]: I1124 01:25:42.100718 4755 generic.go:334] "Generic (PLEG): container finished" podID="81c4cadf-6ecf-43b3-88d2-9194129a34e9" containerID="25ad85ae1511ef84637aca16ba383e90f37c3cd779f2b29cdd3a607fc4896553" exitCode=0 Nov 24 01:25:42 crc kubenswrapper[4755]: I1124 01:25:42.100765 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mf98s" event={"ID":"81c4cadf-6ecf-43b3-88d2-9194129a34e9","Type":"ContainerDied","Data":"25ad85ae1511ef84637aca16ba383e90f37c3cd779f2b29cdd3a607fc4896553"} Nov 24 01:25:43 crc kubenswrapper[4755]: I1124 01:25:43.107770 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mf98s" event={"ID":"81c4cadf-6ecf-43b3-88d2-9194129a34e9","Type":"ContainerStarted","Data":"7ef7bf1467618d4ac800ee9080a9123a07ecdc8550c716cbcae3a6fdadfd2203"} Nov 24 01:25:43 crc kubenswrapper[4755]: I1124 01:25:43.128747 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-mf98s" podStartSLOduration=3.699025012 podStartE2EDuration="6.128729305s" podCreationTimestamp="2025-11-24 01:25:37 +0000 UTC" firstStartedPulling="2025-11-24 01:25:40.057224972 +0000 UTC m=+764.743290473" lastFinishedPulling="2025-11-24 01:25:42.486929275 +0000 UTC m=+767.172994766" observedRunningTime="2025-11-24 01:25:43.125325486 +0000 UTC m=+767.811390987" watchObservedRunningTime="2025-11-24 01:25:43.128729305 +0000 UTC m=+767.814794806" Nov 24 01:25:43 crc kubenswrapper[4755]: I1124 01:25:43.178177 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-7l8nl" Nov 24 01:25:43 crc kubenswrapper[4755]: I1124 01:25:43.178219 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-7l8nl" Nov 24 01:25:43 crc kubenswrapper[4755]: I1124 01:25:43.236289 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-7l8nl" Nov 24 01:25:44 crc kubenswrapper[4755]: I1124 01:25:44.153994 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-7l8nl" Nov 24 01:25:46 crc kubenswrapper[4755]: I1124 01:25:46.831260 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7l8nl"] Nov 24 01:25:46 crc kubenswrapper[4755]: I1124 01:25:46.831956 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-7l8nl" podUID="f92be83e-5b90-41a9-b669-2ecb2639f68a" containerName="registry-server" containerID="cri-o://de0bbecd33709794965bb4a2c3258382a3a45b9ad5251f3c405ad3cefeeae5b0" gracePeriod=2 Nov 24 01:25:47 crc kubenswrapper[4755]: I1124 01:25:47.918707 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7l8nl" Nov 24 01:25:48 crc kubenswrapper[4755]: I1124 01:25:48.082196 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xf6gp\" (UniqueName: \"kubernetes.io/projected/f92be83e-5b90-41a9-b669-2ecb2639f68a-kube-api-access-xf6gp\") pod \"f92be83e-5b90-41a9-b669-2ecb2639f68a\" (UID: \"f92be83e-5b90-41a9-b669-2ecb2639f68a\") " Nov 24 01:25:48 crc kubenswrapper[4755]: I1124 01:25:48.082285 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f92be83e-5b90-41a9-b669-2ecb2639f68a-utilities\") pod \"f92be83e-5b90-41a9-b669-2ecb2639f68a\" (UID: \"f92be83e-5b90-41a9-b669-2ecb2639f68a\") " Nov 24 01:25:48 crc kubenswrapper[4755]: I1124 01:25:48.082355 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f92be83e-5b90-41a9-b669-2ecb2639f68a-catalog-content\") pod \"f92be83e-5b90-41a9-b669-2ecb2639f68a\" (UID: \"f92be83e-5b90-41a9-b669-2ecb2639f68a\") " Nov 24 01:25:48 crc kubenswrapper[4755]: I1124 01:25:48.083277 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f92be83e-5b90-41a9-b669-2ecb2639f68a-utilities" (OuterVolumeSpecName: "utilities") pod "f92be83e-5b90-41a9-b669-2ecb2639f68a" (UID: "f92be83e-5b90-41a9-b669-2ecb2639f68a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:25:48 crc kubenswrapper[4755]: I1124 01:25:48.089165 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f92be83e-5b90-41a9-b669-2ecb2639f68a-kube-api-access-xf6gp" (OuterVolumeSpecName: "kube-api-access-xf6gp") pod "f92be83e-5b90-41a9-b669-2ecb2639f68a" (UID: "f92be83e-5b90-41a9-b669-2ecb2639f68a"). InnerVolumeSpecName "kube-api-access-xf6gp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:25:48 crc kubenswrapper[4755]: I1124 01:25:48.101773 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f92be83e-5b90-41a9-b669-2ecb2639f68a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f92be83e-5b90-41a9-b669-2ecb2639f68a" (UID: "f92be83e-5b90-41a9-b669-2ecb2639f68a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:25:48 crc kubenswrapper[4755]: I1124 01:25:48.140593 4755 generic.go:334] "Generic (PLEG): container finished" podID="f92be83e-5b90-41a9-b669-2ecb2639f68a" containerID="de0bbecd33709794965bb4a2c3258382a3a45b9ad5251f3c405ad3cefeeae5b0" exitCode=0 Nov 24 01:25:48 crc kubenswrapper[4755]: I1124 01:25:48.140673 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7l8nl" Nov 24 01:25:48 crc kubenswrapper[4755]: I1124 01:25:48.140659 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7l8nl" event={"ID":"f92be83e-5b90-41a9-b669-2ecb2639f68a","Type":"ContainerDied","Data":"de0bbecd33709794965bb4a2c3258382a3a45b9ad5251f3c405ad3cefeeae5b0"} Nov 24 01:25:48 crc kubenswrapper[4755]: I1124 01:25:48.140820 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7l8nl" event={"ID":"f92be83e-5b90-41a9-b669-2ecb2639f68a","Type":"ContainerDied","Data":"b92282e8bd5f5517bc67f3c8b0fac6633e7b8ef651e6857c2cf305bc78addebe"} Nov 24 01:25:48 crc kubenswrapper[4755]: I1124 01:25:48.140840 4755 scope.go:117] "RemoveContainer" containerID="de0bbecd33709794965bb4a2c3258382a3a45b9ad5251f3c405ad3cefeeae5b0" Nov 24 01:25:48 crc kubenswrapper[4755]: I1124 01:25:48.160595 4755 scope.go:117] "RemoveContainer" containerID="b583a7b74bd343db150dc76d1d9b82d335e9720bcfa01bd71802536ca45819d1" Nov 24 01:25:48 crc kubenswrapper[4755]: I1124 01:25:48.168745 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7l8nl"] Nov 24 01:25:48 crc kubenswrapper[4755]: I1124 01:25:48.175443 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-7l8nl"] Nov 24 01:25:48 crc kubenswrapper[4755]: I1124 01:25:48.178449 4755 scope.go:117] "RemoveContainer" containerID="6536c00e4f755a90daa3b7621a8a5242e19ad12f039262a9a38e7a1321c0c82f" Nov 24 01:25:48 crc kubenswrapper[4755]: I1124 01:25:48.185561 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xf6gp\" (UniqueName: \"kubernetes.io/projected/f92be83e-5b90-41a9-b669-2ecb2639f68a-kube-api-access-xf6gp\") on node \"crc\" DevicePath \"\"" Nov 24 01:25:48 crc kubenswrapper[4755]: I1124 01:25:48.185621 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f92be83e-5b90-41a9-b669-2ecb2639f68a-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 01:25:48 crc kubenswrapper[4755]: I1124 01:25:48.185634 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f92be83e-5b90-41a9-b669-2ecb2639f68a-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 01:25:48 crc kubenswrapper[4755]: I1124 01:25:48.201661 4755 scope.go:117] "RemoveContainer" containerID="de0bbecd33709794965bb4a2c3258382a3a45b9ad5251f3c405ad3cefeeae5b0" Nov 24 01:25:48 crc kubenswrapper[4755]: E1124 01:25:48.202118 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de0bbecd33709794965bb4a2c3258382a3a45b9ad5251f3c405ad3cefeeae5b0\": container with ID starting with de0bbecd33709794965bb4a2c3258382a3a45b9ad5251f3c405ad3cefeeae5b0 not found: ID does not exist" containerID="de0bbecd33709794965bb4a2c3258382a3a45b9ad5251f3c405ad3cefeeae5b0" Nov 24 01:25:48 crc kubenswrapper[4755]: I1124 01:25:48.202148 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de0bbecd33709794965bb4a2c3258382a3a45b9ad5251f3c405ad3cefeeae5b0"} err="failed to get container status \"de0bbecd33709794965bb4a2c3258382a3a45b9ad5251f3c405ad3cefeeae5b0\": rpc error: code = NotFound desc = could not find container \"de0bbecd33709794965bb4a2c3258382a3a45b9ad5251f3c405ad3cefeeae5b0\": container with ID starting with 
de0bbecd33709794965bb4a2c3258382a3a45b9ad5251f3c405ad3cefeeae5b0 not found: ID does not exist" Nov 24 01:25:48 crc kubenswrapper[4755]: I1124 01:25:48.202176 4755 scope.go:117] "RemoveContainer" containerID="b583a7b74bd343db150dc76d1d9b82d335e9720bcfa01bd71802536ca45819d1" Nov 24 01:25:48 crc kubenswrapper[4755]: E1124 01:25:48.202471 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b583a7b74bd343db150dc76d1d9b82d335e9720bcfa01bd71802536ca45819d1\": container with ID starting with b583a7b74bd343db150dc76d1d9b82d335e9720bcfa01bd71802536ca45819d1 not found: ID does not exist" containerID="b583a7b74bd343db150dc76d1d9b82d335e9720bcfa01bd71802536ca45819d1" Nov 24 01:25:48 crc kubenswrapper[4755]: I1124 01:25:48.202494 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b583a7b74bd343db150dc76d1d9b82d335e9720bcfa01bd71802536ca45819d1"} err="failed to get container status \"b583a7b74bd343db150dc76d1d9b82d335e9720bcfa01bd71802536ca45819d1\": rpc error: code = NotFound desc = could not find container \"b583a7b74bd343db150dc76d1d9b82d335e9720bcfa01bd71802536ca45819d1\": container with ID starting with b583a7b74bd343db150dc76d1d9b82d335e9720bcfa01bd71802536ca45819d1 not found: ID does not exist" Nov 24 01:25:48 crc kubenswrapper[4755]: I1124 01:25:48.202509 4755 scope.go:117] "RemoveContainer" containerID="6536c00e4f755a90daa3b7621a8a5242e19ad12f039262a9a38e7a1321c0c82f" Nov 24 01:25:48 crc kubenswrapper[4755]: E1124 01:25:48.202784 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6536c00e4f755a90daa3b7621a8a5242e19ad12f039262a9a38e7a1321c0c82f\": container with ID starting with 6536c00e4f755a90daa3b7621a8a5242e19ad12f039262a9a38e7a1321c0c82f not found: ID does not exist" containerID="6536c00e4f755a90daa3b7621a8a5242e19ad12f039262a9a38e7a1321c0c82f" Nov 24 01:25:48 crc kubenswrapper[4755]: I1124 01:25:48.202855 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6536c00e4f755a90daa3b7621a8a5242e19ad12f039262a9a38e7a1321c0c82f"} err="failed to get container status \"6536c00e4f755a90daa3b7621a8a5242e19ad12f039262a9a38e7a1321c0c82f\": rpc error: code = NotFound desc = could not find container \"6536c00e4f755a90daa3b7621a8a5242e19ad12f039262a9a38e7a1321c0c82f\": container with ID starting with 6536c00e4f755a90daa3b7621a8a5242e19ad12f039262a9a38e7a1321c0c82f not found: ID does not exist" Nov 24 01:25:48 crc kubenswrapper[4755]: I1124 01:25:48.459802 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-mf98s" Nov 24 01:25:48 crc kubenswrapper[4755]: I1124 01:25:48.459876 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-mf98s" Nov 24 01:25:49 crc kubenswrapper[4755]: I1124 01:25:49.506801 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-mf98s" podUID="81c4cadf-6ecf-43b3-88d2-9194129a34e9" containerName="registry-server" probeResult="failure" output=< Nov 24 01:25:49 crc kubenswrapper[4755]: timeout: failed to connect service ":50051" within 1s Nov 24 01:25:49 crc kubenswrapper[4755]: > Nov 24 01:25:49 crc kubenswrapper[4755]: I1124 01:25:49.633863 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-65p2h"] Nov 24 01:25:49 crc 
kubenswrapper[4755]: E1124 01:25:49.634348 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f92be83e-5b90-41a9-b669-2ecb2639f68a" containerName="registry-server" Nov 24 01:25:49 crc kubenswrapper[4755]: I1124 01:25:49.634360 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="f92be83e-5b90-41a9-b669-2ecb2639f68a" containerName="registry-server" Nov 24 01:25:49 crc kubenswrapper[4755]: E1124 01:25:49.634370 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f92be83e-5b90-41a9-b669-2ecb2639f68a" containerName="extract-utilities" Nov 24 01:25:49 crc kubenswrapper[4755]: I1124 01:25:49.634376 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="f92be83e-5b90-41a9-b669-2ecb2639f68a" containerName="extract-utilities" Nov 24 01:25:49 crc kubenswrapper[4755]: E1124 01:25:49.634389 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f92be83e-5b90-41a9-b669-2ecb2639f68a" containerName="extract-content" Nov 24 01:25:49 crc kubenswrapper[4755]: I1124 01:25:49.634396 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="f92be83e-5b90-41a9-b669-2ecb2639f68a" containerName="extract-content" Nov 24 01:25:49 crc kubenswrapper[4755]: I1124 01:25:49.634513 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="f92be83e-5b90-41a9-b669-2ecb2639f68a" containerName="registry-server" Nov 24 01:25:49 crc kubenswrapper[4755]: I1124 01:25:49.635326 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-65p2h" Nov 24 01:25:49 crc kubenswrapper[4755]: I1124 01:25:49.648622 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-65p2h"] Nov 24 01:25:49 crc kubenswrapper[4755]: I1124 01:25:49.713922 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7de87f2f-d51b-476a-a3e2-570b6dadedb6-utilities\") pod \"community-operators-65p2h\" (UID: \"7de87f2f-d51b-476a-a3e2-570b6dadedb6\") " pod="openshift-marketplace/community-operators-65p2h" Nov 24 01:25:49 crc kubenswrapper[4755]: I1124 01:25:49.714070 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9c8vt\" (UniqueName: \"kubernetes.io/projected/7de87f2f-d51b-476a-a3e2-570b6dadedb6-kube-api-access-9c8vt\") pod \"community-operators-65p2h\" (UID: \"7de87f2f-d51b-476a-a3e2-570b6dadedb6\") " pod="openshift-marketplace/community-operators-65p2h" Nov 24 01:25:49 crc kubenswrapper[4755]: I1124 01:25:49.714412 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7de87f2f-d51b-476a-a3e2-570b6dadedb6-catalog-content\") pod \"community-operators-65p2h\" (UID: \"7de87f2f-d51b-476a-a3e2-570b6dadedb6\") " pod="openshift-marketplace/community-operators-65p2h" Nov 24 01:25:49 crc kubenswrapper[4755]: I1124 01:25:49.815303 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7de87f2f-d51b-476a-a3e2-570b6dadedb6-catalog-content\") pod \"community-operators-65p2h\" (UID: \"7de87f2f-d51b-476a-a3e2-570b6dadedb6\") " pod="openshift-marketplace/community-operators-65p2h" Nov 24 01:25:49 crc kubenswrapper[4755]: I1124 01:25:49.816107 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" 
(UniqueName: \"kubernetes.io/empty-dir/7de87f2f-d51b-476a-a3e2-570b6dadedb6-utilities\") pod \"community-operators-65p2h\" (UID: \"7de87f2f-d51b-476a-a3e2-570b6dadedb6\") " pod="openshift-marketplace/community-operators-65p2h" Nov 24 01:25:49 crc kubenswrapper[4755]: I1124 01:25:49.816473 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9c8vt\" (UniqueName: \"kubernetes.io/projected/7de87f2f-d51b-476a-a3e2-570b6dadedb6-kube-api-access-9c8vt\") pod \"community-operators-65p2h\" (UID: \"7de87f2f-d51b-476a-a3e2-570b6dadedb6\") " pod="openshift-marketplace/community-operators-65p2h" Nov 24 01:25:49 crc kubenswrapper[4755]: I1124 01:25:49.815843 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7de87f2f-d51b-476a-a3e2-570b6dadedb6-catalog-content\") pod \"community-operators-65p2h\" (UID: \"7de87f2f-d51b-476a-a3e2-570b6dadedb6\") " pod="openshift-marketplace/community-operators-65p2h" Nov 24 01:25:49 crc kubenswrapper[4755]: I1124 01:25:49.816405 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7de87f2f-d51b-476a-a3e2-570b6dadedb6-utilities\") pod \"community-operators-65p2h\" (UID: \"7de87f2f-d51b-476a-a3e2-570b6dadedb6\") " pod="openshift-marketplace/community-operators-65p2h" Nov 24 01:25:49 crc kubenswrapper[4755]: I1124 01:25:49.839297 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9c8vt\" (UniqueName: \"kubernetes.io/projected/7de87f2f-d51b-476a-a3e2-570b6dadedb6-kube-api-access-9c8vt\") pod \"community-operators-65p2h\" (UID: \"7de87f2f-d51b-476a-a3e2-570b6dadedb6\") " pod="openshift-marketplace/community-operators-65p2h" Nov 24 01:25:49 crc kubenswrapper[4755]: I1124 01:25:49.951597 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-65p2h" Nov 24 01:25:50 crc kubenswrapper[4755]: I1124 01:25:50.007196 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f92be83e-5b90-41a9-b669-2ecb2639f68a" path="/var/lib/kubelet/pods/f92be83e-5b90-41a9-b669-2ecb2639f68a/volumes" Nov 24 01:25:50 crc kubenswrapper[4755]: I1124 01:25:50.412825 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-65p2h"] Nov 24 01:25:50 crc kubenswrapper[4755]: W1124 01:25:50.420494 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7de87f2f_d51b_476a_a3e2_570b6dadedb6.slice/crio-c2384c8186af41fbbd6f6c22b23b45ca520acc269ba0a2ddb8cd4245e804ec90 WatchSource:0}: Error finding container c2384c8186af41fbbd6f6c22b23b45ca520acc269ba0a2ddb8cd4245e804ec90: Status 404 returned error can't find the container with id c2384c8186af41fbbd6f6c22b23b45ca520acc269ba0a2ddb8cd4245e804ec90 Nov 24 01:25:50 crc kubenswrapper[4755]: I1124 01:25:50.434797 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-8gzgn"] Nov 24 01:25:50 crc kubenswrapper[4755]: I1124 01:25:50.436011 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8gzgn" Nov 24 01:25:50 crc kubenswrapper[4755]: I1124 01:25:50.482237 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8gzgn"] Nov 24 01:25:50 crc kubenswrapper[4755]: I1124 01:25:50.525391 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g74qh\" (UniqueName: \"kubernetes.io/projected/e8c1a228-1510-48be-bb0a-3d5dea46aa5e-kube-api-access-g74qh\") pod \"certified-operators-8gzgn\" (UID: \"e8c1a228-1510-48be-bb0a-3d5dea46aa5e\") " pod="openshift-marketplace/certified-operators-8gzgn" Nov 24 01:25:50 crc kubenswrapper[4755]: I1124 01:25:50.525505 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8c1a228-1510-48be-bb0a-3d5dea46aa5e-utilities\") pod \"certified-operators-8gzgn\" (UID: \"e8c1a228-1510-48be-bb0a-3d5dea46aa5e\") " pod="openshift-marketplace/certified-operators-8gzgn" Nov 24 01:25:50 crc kubenswrapper[4755]: I1124 01:25:50.525538 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8c1a228-1510-48be-bb0a-3d5dea46aa5e-catalog-content\") pod \"certified-operators-8gzgn\" (UID: \"e8c1a228-1510-48be-bb0a-3d5dea46aa5e\") " pod="openshift-marketplace/certified-operators-8gzgn" Nov 24 01:25:50 crc kubenswrapper[4755]: I1124 01:25:50.626240 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8c1a228-1510-48be-bb0a-3d5dea46aa5e-utilities\") pod \"certified-operators-8gzgn\" (UID: \"e8c1a228-1510-48be-bb0a-3d5dea46aa5e\") " pod="openshift-marketplace/certified-operators-8gzgn" Nov 24 01:25:50 crc kubenswrapper[4755]: I1124 01:25:50.626285 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8c1a228-1510-48be-bb0a-3d5dea46aa5e-catalog-content\") pod \"certified-operators-8gzgn\" (UID: \"e8c1a228-1510-48be-bb0a-3d5dea46aa5e\") " pod="openshift-marketplace/certified-operators-8gzgn" Nov 24 01:25:50 crc kubenswrapper[4755]: I1124 01:25:50.626329 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g74qh\" (UniqueName: \"kubernetes.io/projected/e8c1a228-1510-48be-bb0a-3d5dea46aa5e-kube-api-access-g74qh\") pod \"certified-operators-8gzgn\" (UID: \"e8c1a228-1510-48be-bb0a-3d5dea46aa5e\") " pod="openshift-marketplace/certified-operators-8gzgn" Nov 24 01:25:50 crc kubenswrapper[4755]: I1124 01:25:50.627067 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8c1a228-1510-48be-bb0a-3d5dea46aa5e-catalog-content\") pod \"certified-operators-8gzgn\" (UID: \"e8c1a228-1510-48be-bb0a-3d5dea46aa5e\") " pod="openshift-marketplace/certified-operators-8gzgn" Nov 24 01:25:50 crc kubenswrapper[4755]: I1124 01:25:50.627163 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8c1a228-1510-48be-bb0a-3d5dea46aa5e-utilities\") pod \"certified-operators-8gzgn\" (UID: \"e8c1a228-1510-48be-bb0a-3d5dea46aa5e\") " pod="openshift-marketplace/certified-operators-8gzgn" Nov 24 01:25:50 crc kubenswrapper[4755]: I1124 01:25:50.653583 4755 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-g74qh\" (UniqueName: \"kubernetes.io/projected/e8c1a228-1510-48be-bb0a-3d5dea46aa5e-kube-api-access-g74qh\") pod \"certified-operators-8gzgn\" (UID: \"e8c1a228-1510-48be-bb0a-3d5dea46aa5e\") " pod="openshift-marketplace/certified-operators-8gzgn" Nov 24 01:25:50 crc kubenswrapper[4755]: I1124 01:25:50.762846 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8gzgn" Nov 24 01:25:51 crc kubenswrapper[4755]: I1124 01:25:51.160660 4755 generic.go:334] "Generic (PLEG): container finished" podID="7de87f2f-d51b-476a-a3e2-570b6dadedb6" containerID="ecd29b8e139706b4c311bb86011176bdff6c350f9be2a4eacc47b743c903cc82" exitCode=0 Nov 24 01:25:51 crc kubenswrapper[4755]: I1124 01:25:51.160713 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-65p2h" event={"ID":"7de87f2f-d51b-476a-a3e2-570b6dadedb6","Type":"ContainerDied","Data":"ecd29b8e139706b4c311bb86011176bdff6c350f9be2a4eacc47b743c903cc82"} Nov 24 01:25:51 crc kubenswrapper[4755]: I1124 01:25:51.160746 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-65p2h" event={"ID":"7de87f2f-d51b-476a-a3e2-570b6dadedb6","Type":"ContainerStarted","Data":"c2384c8186af41fbbd6f6c22b23b45ca520acc269ba0a2ddb8cd4245e804ec90"} Nov 24 01:25:51 crc kubenswrapper[4755]: I1124 01:25:51.314301 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8gzgn"] Nov 24 01:25:51 crc kubenswrapper[4755]: W1124 01:25:51.318445 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode8c1a228_1510_48be_bb0a_3d5dea46aa5e.slice/crio-62023b935f4c98473f5edf00ad364c808541d1b50956732b6d0387cfed3029f7 WatchSource:0}: Error finding container 62023b935f4c98473f5edf00ad364c808541d1b50956732b6d0387cfed3029f7: Status 404 returned error can't find the container with id 62023b935f4c98473f5edf00ad364c808541d1b50956732b6d0387cfed3029f7 Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.171264 4755 generic.go:334] "Generic (PLEG): container finished" podID="e8c1a228-1510-48be-bb0a-3d5dea46aa5e" containerID="bd960007a557a65cc12ea4a1b35d29fca7f6c1306032090fd7f6b44448828d0c" exitCode=0 Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.171462 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8gzgn" event={"ID":"e8c1a228-1510-48be-bb0a-3d5dea46aa5e","Type":"ContainerDied","Data":"bd960007a557a65cc12ea4a1b35d29fca7f6c1306032090fd7f6b44448828d0c"} Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.171631 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8gzgn" event={"ID":"e8c1a228-1510-48be-bb0a-3d5dea46aa5e","Type":"ContainerStarted","Data":"62023b935f4c98473f5edf00ad364c808541d1b50956732b6d0387cfed3029f7"} Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.176019 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-65p2h" event={"ID":"7de87f2f-d51b-476a-a3e2-570b6dadedb6","Type":"ContainerStarted","Data":"09b0a5a54ba499ea1f7821a9bbe73678f47da834da761ec5b9fbae70778f73a6"} Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.680055 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-75fb479bcc-b297l"] Nov 24 01:25:52 crc 
kubenswrapper[4755]: I1124 01:25:52.681835 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-75fb479bcc-b297l" Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.683753 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-6498cbf48f-hh7kh"] Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.684962 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-6498cbf48f-hh7kh" Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.686975 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-tfk92" Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.692069 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-gmw86" Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.697027 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-6498cbf48f-hh7kh"] Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.700166 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-75fb479bcc-b297l"] Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.711222 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-767ccfd65f-fdwk5"] Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.712201 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-767ccfd65f-fdwk5" Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.717301 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-b9654" Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.725132 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-767ccfd65f-fdwk5"] Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.770837 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-56f54d6746-45h29"] Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.786532 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-7969689c84-bnmvb"] Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.798271 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-56f54d6746-45h29" Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.799708 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-7969689c84-bnmvb"] Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.800018 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-7969689c84-bnmvb" Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.804403 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-56f54d6746-45h29"] Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.805984 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qhws4\" (UniqueName: \"kubernetes.io/projected/a22ef49d-f887-41f4-ad37-6b1b0bf7a748-kube-api-access-qhws4\") pod \"glance-operator-controller-manager-7969689c84-bnmvb\" (UID: \"a22ef49d-f887-41f4-ad37-6b1b0bf7a748\") " pod="openstack-operators/glance-operator-controller-manager-7969689c84-bnmvb" Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.806159 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-cl4kw" Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.806212 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mtfrx\" (UniqueName: \"kubernetes.io/projected/fc04cdee-f1bd-4d40-9c1c-02f4e9661851-kube-api-access-mtfrx\") pod \"cinder-operator-controller-manager-6498cbf48f-hh7kh\" (UID: \"fc04cdee-f1bd-4d40-9c1c-02f4e9661851\") " pod="openstack-operators/cinder-operator-controller-manager-6498cbf48f-hh7kh" Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.806285 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-94sxx\" (UniqueName: \"kubernetes.io/projected/22780566-edb3-47e3-b3ea-a42def0f4460-kube-api-access-94sxx\") pod \"heat-operator-controller-manager-56f54d6746-45h29\" (UID: \"22780566-edb3-47e3-b3ea-a42def0f4460\") " pod="openstack-operators/heat-operator-controller-manager-56f54d6746-45h29" Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.806411 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4trz9\" (UniqueName: \"kubernetes.io/projected/e84b7100-14c9-436c-97e5-d14c2455b42a-kube-api-access-4trz9\") pod \"designate-operator-controller-manager-767ccfd65f-fdwk5\" (UID: \"e84b7100-14c9-436c-97e5-d14c2455b42a\") " pod="openstack-operators/designate-operator-controller-manager-767ccfd65f-fdwk5" Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.806490 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrdpk\" (UniqueName: \"kubernetes.io/projected/1660a0eb-228b-41bc-a360-a71fec20d415-kube-api-access-qrdpk\") pod \"barbican-operator-controller-manager-75fb479bcc-b297l\" (UID: \"1660a0eb-228b-41bc-a360-a71fec20d415\") " pod="openstack-operators/barbican-operator-controller-manager-75fb479bcc-b297l" Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.806630 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-x2bkn" Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.810992 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-598f69df5d-97znt"] Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.812762 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-598f69df5d-97znt" Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.814597 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-598f69df5d-97znt"] Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.814878 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-7m5hc" Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.819551 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-6dd8864d7c-zsbst"] Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.821014 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-6dd8864d7c-zsbst" Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.825597 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-99b499f4-p9wgw"] Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.830915 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.831146 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-99b499f4-p9wgw" Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.833134 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-295pk" Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.842020 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-wkvnj" Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.843556 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-6dd8864d7c-zsbst"] Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.871642 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7454b96578-5622b"] Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.872819 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7454b96578-5622b" Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.881011 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-6l92c" Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.886677 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-99b499f4-p9wgw"] Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.898867 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7454b96578-5622b"] Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.911938 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fd91f6d6-1cc9-4350-a22a-b3859073f6e0-cert\") pod \"infra-operator-controller-manager-6dd8864d7c-zsbst\" (UID: \"fd91f6d6-1cc9-4350-a22a-b3859073f6e0\") " pod="openstack-operators/infra-operator-controller-manager-6dd8864d7c-zsbst" Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.912000 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4trz9\" (UniqueName: \"kubernetes.io/projected/e84b7100-14c9-436c-97e5-d14c2455b42a-kube-api-access-4trz9\") pod \"designate-operator-controller-manager-767ccfd65f-fdwk5\" (UID: \"e84b7100-14c9-436c-97e5-d14c2455b42a\") " pod="openstack-operators/designate-operator-controller-manager-767ccfd65f-fdwk5" Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.912062 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vztsm\" (UniqueName: \"kubernetes.io/projected/f443bd2d-3e36-44eb-9684-8ec505b8bea7-kube-api-access-vztsm\") pod \"ironic-operator-controller-manager-99b499f4-p9wgw\" (UID: \"f443bd2d-3e36-44eb-9684-8ec505b8bea7\") " pod="openstack-operators/ironic-operator-controller-manager-99b499f4-p9wgw" Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.912096 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qrdpk\" (UniqueName: \"kubernetes.io/projected/1660a0eb-228b-41bc-a360-a71fec20d415-kube-api-access-qrdpk\") pod \"barbican-operator-controller-manager-75fb479bcc-b297l\" (UID: \"1660a0eb-228b-41bc-a360-a71fec20d415\") " pod="openstack-operators/barbican-operator-controller-manager-75fb479bcc-b297l" Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.912120 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5wrh\" (UniqueName: \"kubernetes.io/projected/fd91f6d6-1cc9-4350-a22a-b3859073f6e0-kube-api-access-j5wrh\") pod \"infra-operator-controller-manager-6dd8864d7c-zsbst\" (UID: \"fd91f6d6-1cc9-4350-a22a-b3859073f6e0\") " pod="openstack-operators/infra-operator-controller-manager-6dd8864d7c-zsbst" Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.912160 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qhws4\" (UniqueName: \"kubernetes.io/projected/a22ef49d-f887-41f4-ad37-6b1b0bf7a748-kube-api-access-qhws4\") pod \"glance-operator-controller-manager-7969689c84-bnmvb\" (UID: \"a22ef49d-f887-41f4-ad37-6b1b0bf7a748\") " pod="openstack-operators/glance-operator-controller-manager-7969689c84-bnmvb" Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.912189 4755 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cvhsn\" (UniqueName: \"kubernetes.io/projected/dd14c3fa-bb96-4795-b339-a506c71b16a2-kube-api-access-cvhsn\") pod \"keystone-operator-controller-manager-7454b96578-5622b\" (UID: \"dd14c3fa-bb96-4795-b339-a506c71b16a2\") " pod="openstack-operators/keystone-operator-controller-manager-7454b96578-5622b" Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.912219 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mtfrx\" (UniqueName: \"kubernetes.io/projected/fc04cdee-f1bd-4d40-9c1c-02f4e9661851-kube-api-access-mtfrx\") pod \"cinder-operator-controller-manager-6498cbf48f-hh7kh\" (UID: \"fc04cdee-f1bd-4d40-9c1c-02f4e9661851\") " pod="openstack-operators/cinder-operator-controller-manager-6498cbf48f-hh7kh" Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.912256 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-94sxx\" (UniqueName: \"kubernetes.io/projected/22780566-edb3-47e3-b3ea-a42def0f4460-kube-api-access-94sxx\") pod \"heat-operator-controller-manager-56f54d6746-45h29\" (UID: \"22780566-edb3-47e3-b3ea-a42def0f4460\") " pod="openstack-operators/heat-operator-controller-manager-56f54d6746-45h29" Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.912296 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ckkzf\" (UniqueName: \"kubernetes.io/projected/77153df1-136d-456e-a6e0-817b2f633d3e-kube-api-access-ckkzf\") pod \"horizon-operator-controller-manager-598f69df5d-97znt\" (UID: \"77153df1-136d-456e-a6e0-817b2f633d3e\") " pod="openstack-operators/horizon-operator-controller-manager-598f69df5d-97znt" Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.919047 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-58f887965d-2rnh9"] Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.920052 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-58f887965d-2rnh9" Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.929362 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-lqqbj" Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.937796 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4trz9\" (UniqueName: \"kubernetes.io/projected/e84b7100-14c9-436c-97e5-d14c2455b42a-kube-api-access-4trz9\") pod \"designate-operator-controller-manager-767ccfd65f-fdwk5\" (UID: \"e84b7100-14c9-436c-97e5-d14c2455b42a\") " pod="openstack-operators/designate-operator-controller-manager-767ccfd65f-fdwk5" Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.941439 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-54b5986bb8-gqlfb"] Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.942787 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-54b5986bb8-gqlfb" Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.944056 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qhws4\" (UniqueName: \"kubernetes.io/projected/a22ef49d-f887-41f4-ad37-6b1b0bf7a748-kube-api-access-qhws4\") pod \"glance-operator-controller-manager-7969689c84-bnmvb\" (UID: \"a22ef49d-f887-41f4-ad37-6b1b0bf7a748\") " pod="openstack-operators/glance-operator-controller-manager-7969689c84-bnmvb" Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.944254 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mtfrx\" (UniqueName: \"kubernetes.io/projected/fc04cdee-f1bd-4d40-9c1c-02f4e9661851-kube-api-access-mtfrx\") pod \"cinder-operator-controller-manager-6498cbf48f-hh7kh\" (UID: \"fc04cdee-f1bd-4d40-9c1c-02f4e9661851\") " pod="openstack-operators/cinder-operator-controller-manager-6498cbf48f-hh7kh" Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.946992 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-flbx4" Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.950308 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qrdpk\" (UniqueName: \"kubernetes.io/projected/1660a0eb-228b-41bc-a360-a71fec20d415-kube-api-access-qrdpk\") pod \"barbican-operator-controller-manager-75fb479bcc-b297l\" (UID: \"1660a0eb-228b-41bc-a360-a71fec20d415\") " pod="openstack-operators/barbican-operator-controller-manager-75fb479bcc-b297l" Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.958101 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-94sxx\" (UniqueName: \"kubernetes.io/projected/22780566-edb3-47e3-b3ea-a42def0f4460-kube-api-access-94sxx\") pod \"heat-operator-controller-manager-56f54d6746-45h29\" (UID: \"22780566-edb3-47e3-b3ea-a42def0f4460\") " pod="openstack-operators/heat-operator-controller-manager-56f54d6746-45h29" Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.958175 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-78bd47f458-pcp8q"] Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.977306 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-58f887965d-2rnh9"] Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.977340 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-54b5986bb8-gqlfb"] Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.977353 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-78bd47f458-pcp8q"] Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.977427 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-78bd47f458-pcp8q" Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.980165 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-krzhh" Nov 24 01:25:52 crc kubenswrapper[4755]: I1124 01:25:52.997240 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-cfbb9c588-prcbz"] Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.000968 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-75fb479bcc-b297l" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.002586 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-cfbb9c588-prcbz" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.004745 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-66292" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.010334 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-cfbb9c588-prcbz"] Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.013858 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5wrh\" (UniqueName: \"kubernetes.io/projected/fd91f6d6-1cc9-4350-a22a-b3859073f6e0-kube-api-access-j5wrh\") pod \"infra-operator-controller-manager-6dd8864d7c-zsbst\" (UID: \"fd91f6d6-1cc9-4350-a22a-b3859073f6e0\") " pod="openstack-operators/infra-operator-controller-manager-6dd8864d7c-zsbst" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.013909 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cvhsn\" (UniqueName: \"kubernetes.io/projected/dd14c3fa-bb96-4795-b339-a506c71b16a2-kube-api-access-cvhsn\") pod \"keystone-operator-controller-manager-7454b96578-5622b\" (UID: \"dd14c3fa-bb96-4795-b339-a506c71b16a2\") " pod="openstack-operators/keystone-operator-controller-manager-7454b96578-5622b" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.013951 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ckkzf\" (UniqueName: \"kubernetes.io/projected/77153df1-136d-456e-a6e0-817b2f633d3e-kube-api-access-ckkzf\") pod \"horizon-operator-controller-manager-598f69df5d-97znt\" (UID: \"77153df1-136d-456e-a6e0-817b2f633d3e\") " pod="openstack-operators/horizon-operator-controller-manager-598f69df5d-97znt" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.013974 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fd91f6d6-1cc9-4350-a22a-b3859073f6e0-cert\") pod \"infra-operator-controller-manager-6dd8864d7c-zsbst\" (UID: \"fd91f6d6-1cc9-4350-a22a-b3859073f6e0\") " pod="openstack-operators/infra-operator-controller-manager-6dd8864d7c-zsbst" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.014023 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vztsm\" (UniqueName: \"kubernetes.io/projected/f443bd2d-3e36-44eb-9684-8ec505b8bea7-kube-api-access-vztsm\") pod \"ironic-operator-controller-manager-99b499f4-p9wgw\" (UID: \"f443bd2d-3e36-44eb-9684-8ec505b8bea7\") " 
pod="openstack-operators/ironic-operator-controller-manager-99b499f4-p9wgw" Nov 24 01:25:53 crc kubenswrapper[4755]: E1124 01:25:53.015389 4755 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Nov 24 01:25:53 crc kubenswrapper[4755]: E1124 01:25:53.015437 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fd91f6d6-1cc9-4350-a22a-b3859073f6e0-cert podName:fd91f6d6-1cc9-4350-a22a-b3859073f6e0 nodeName:}" failed. No retries permitted until 2025-11-24 01:25:53.515421317 +0000 UTC m=+778.201486818 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/fd91f6d6-1cc9-4350-a22a-b3859073f6e0-cert") pod "infra-operator-controller-manager-6dd8864d7c-zsbst" (UID: "fd91f6d6-1cc9-4350-a22a-b3859073f6e0") : secret "infra-operator-webhook-server-cert" not found Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.018410 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-6498cbf48f-hh7kh" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.036711 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-54cfbf4c7d-7t6ft"] Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.037768 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-54cfbf4c7d-7t6ft" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.044353 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-7tjmd" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.047414 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-767ccfd65f-fdwk5" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.052663 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-54cfbf4c7d-7t6ft"] Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.069720 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-8c7444f48-rbmhv"] Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.070891 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-8c7444f48-rbmhv" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.082758 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-54fc5f65b7-b94wx"] Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.090934 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-54fc5f65b7-b94wx" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.104773 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ckkzf\" (UniqueName: \"kubernetes.io/projected/77153df1-136d-456e-a6e0-817b2f633d3e-kube-api-access-ckkzf\") pod \"horizon-operator-controller-manager-598f69df5d-97znt\" (UID: \"77153df1-136d-456e-a6e0-817b2f633d3e\") " pod="openstack-operators/horizon-operator-controller-manager-598f69df5d-97znt" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.104960 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-hmrd6" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.105028 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.105915 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-45m52" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.109234 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cvhsn\" (UniqueName: \"kubernetes.io/projected/dd14c3fa-bb96-4795-b339-a506c71b16a2-kube-api-access-cvhsn\") pod \"keystone-operator-controller-manager-7454b96578-5622b\" (UID: \"dd14c3fa-bb96-4795-b339-a506c71b16a2\") " pod="openstack-operators/keystone-operator-controller-manager-7454b96578-5622b" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.110579 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j5wrh\" (UniqueName: \"kubernetes.io/projected/fd91f6d6-1cc9-4350-a22a-b3859073f6e0-kube-api-access-j5wrh\") pod \"infra-operator-controller-manager-6dd8864d7c-zsbst\" (UID: \"fd91f6d6-1cc9-4350-a22a-b3859073f6e0\") " pod="openstack-operators/infra-operator-controller-manager-6dd8864d7c-zsbst" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.113531 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vztsm\" (UniqueName: \"kubernetes.io/projected/f443bd2d-3e36-44eb-9684-8ec505b8bea7-kube-api-access-vztsm\") pod \"ironic-operator-controller-manager-99b499f4-p9wgw\" (UID: \"f443bd2d-3e36-44eb-9684-8ec505b8bea7\") " pod="openstack-operators/ironic-operator-controller-manager-99b499f4-p9wgw" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.119710 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8rjx7\" (UniqueName: \"kubernetes.io/projected/9b49aa48-d2db-40a6-9f6f-6b8e5bd3cd07-kube-api-access-8rjx7\") pod \"nova-operator-controller-manager-cfbb9c588-prcbz\" (UID: \"9b49aa48-d2db-40a6-9f6f-6b8e5bd3cd07\") " pod="openstack-operators/nova-operator-controller-manager-cfbb9c588-prcbz" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.119772 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qfkkn\" (UniqueName: \"kubernetes.io/projected/f013c70d-8c89-40f5-a132-393403d297c2-kube-api-access-qfkkn\") pod \"openstack-baremetal-operator-controller-manager-8c7444f48-rbmhv\" (UID: \"f013c70d-8c89-40f5-a132-393403d297c2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-8c7444f48-rbmhv" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 
01:25:53.119798 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z95f7\" (UniqueName: \"kubernetes.io/projected/9e78d1d5-6ae4-4fc3-9edf-77e9f331bf19-kube-api-access-z95f7\") pod \"neutron-operator-controller-manager-78bd47f458-pcp8q\" (UID: \"9e78d1d5-6ae4-4fc3-9edf-77e9f331bf19\") " pod="openstack-operators/neutron-operator-controller-manager-78bd47f458-pcp8q" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.119816 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kkw4t\" (UniqueName: \"kubernetes.io/projected/33d6bfe7-943b-4a59-bfdd-e240b869163d-kube-api-access-kkw4t\") pod \"manila-operator-controller-manager-58f887965d-2rnh9\" (UID: \"33d6bfe7-943b-4a59-bfdd-e240b869163d\") " pod="openstack-operators/manila-operator-controller-manager-58f887965d-2rnh9" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.119832 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dm6jh\" (UniqueName: \"kubernetes.io/projected/1a7c3ac2-1c0f-474e-837c-b80226975978-kube-api-access-dm6jh\") pod \"mariadb-operator-controller-manager-54b5986bb8-gqlfb\" (UID: \"1a7c3ac2-1c0f-474e-837c-b80226975978\") " pod="openstack-operators/mariadb-operator-controller-manager-54b5986bb8-gqlfb" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.119850 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g9bj9\" (UniqueName: \"kubernetes.io/projected/3c770fe2-ea89-4ba8-b4f0-95a4f310ea65-kube-api-access-g9bj9\") pod \"octavia-operator-controller-manager-54cfbf4c7d-7t6ft\" (UID: \"3c770fe2-ea89-4ba8-b4f0-95a4f310ea65\") " pod="openstack-operators/octavia-operator-controller-manager-54cfbf4c7d-7t6ft" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.119873 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f013c70d-8c89-40f5-a132-393403d297c2-cert\") pod \"openstack-baremetal-operator-controller-manager-8c7444f48-rbmhv\" (UID: \"f013c70d-8c89-40f5-a132-393403d297c2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-8c7444f48-rbmhv" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.119940 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ht287\" (UniqueName: \"kubernetes.io/projected/73185acc-71f3-452e-8454-ebad97b6c6ad-kube-api-access-ht287\") pod \"ovn-operator-controller-manager-54fc5f65b7-b94wx\" (UID: \"73185acc-71f3-452e-8454-ebad97b6c6ad\") " pod="openstack-operators/ovn-operator-controller-manager-54fc5f65b7-b94wx" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.124694 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-8c7444f48-rbmhv"] Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.132215 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-56f54d6746-45h29" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.142875 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-54fc5f65b7-b94wx"] Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.146208 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-7969689c84-bnmvb" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.170555 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-598f69df5d-97znt" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.175572 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-5b797b8dff-s22mt"] Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.178194 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-5b797b8dff-s22mt" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.182508 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-5f7vj" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.211011 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-99b499f4-p9wgw" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.222218 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8rjx7\" (UniqueName: \"kubernetes.io/projected/9b49aa48-d2db-40a6-9f6f-6b8e5bd3cd07-kube-api-access-8rjx7\") pod \"nova-operator-controller-manager-cfbb9c588-prcbz\" (UID: \"9b49aa48-d2db-40a6-9f6f-6b8e5bd3cd07\") " pod="openstack-operators/nova-operator-controller-manager-cfbb9c588-prcbz" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.222303 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qfkkn\" (UniqueName: \"kubernetes.io/projected/f013c70d-8c89-40f5-a132-393403d297c2-kube-api-access-qfkkn\") pod \"openstack-baremetal-operator-controller-manager-8c7444f48-rbmhv\" (UID: \"f013c70d-8c89-40f5-a132-393403d297c2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-8c7444f48-rbmhv" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.222356 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z95f7\" (UniqueName: \"kubernetes.io/projected/9e78d1d5-6ae4-4fc3-9edf-77e9f331bf19-kube-api-access-z95f7\") pod \"neutron-operator-controller-manager-78bd47f458-pcp8q\" (UID: \"9e78d1d5-6ae4-4fc3-9edf-77e9f331bf19\") " pod="openstack-operators/neutron-operator-controller-manager-78bd47f458-pcp8q" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.222388 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kkw4t\" (UniqueName: \"kubernetes.io/projected/33d6bfe7-943b-4a59-bfdd-e240b869163d-kube-api-access-kkw4t\") pod \"manila-operator-controller-manager-58f887965d-2rnh9\" (UID: \"33d6bfe7-943b-4a59-bfdd-e240b869163d\") " pod="openstack-operators/manila-operator-controller-manager-58f887965d-2rnh9" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.222420 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dm6jh\" (UniqueName: \"kubernetes.io/projected/1a7c3ac2-1c0f-474e-837c-b80226975978-kube-api-access-dm6jh\") pod \"mariadb-operator-controller-manager-54b5986bb8-gqlfb\" (UID: \"1a7c3ac2-1c0f-474e-837c-b80226975978\") " pod="openstack-operators/mariadb-operator-controller-manager-54b5986bb8-gqlfb" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.222454 4755 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g9bj9\" (UniqueName: \"kubernetes.io/projected/3c770fe2-ea89-4ba8-b4f0-95a4f310ea65-kube-api-access-g9bj9\") pod \"octavia-operator-controller-manager-54cfbf4c7d-7t6ft\" (UID: \"3c770fe2-ea89-4ba8-b4f0-95a4f310ea65\") " pod="openstack-operators/octavia-operator-controller-manager-54cfbf4c7d-7t6ft" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.222501 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f013c70d-8c89-40f5-a132-393403d297c2-cert\") pod \"openstack-baremetal-operator-controller-manager-8c7444f48-rbmhv\" (UID: \"f013c70d-8c89-40f5-a132-393403d297c2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-8c7444f48-rbmhv" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.222566 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ht287\" (UniqueName: \"kubernetes.io/projected/73185acc-71f3-452e-8454-ebad97b6c6ad-kube-api-access-ht287\") pod \"ovn-operator-controller-manager-54fc5f65b7-b94wx\" (UID: \"73185acc-71f3-452e-8454-ebad97b6c6ad\") " pod="openstack-operators/ovn-operator-controller-manager-54fc5f65b7-b94wx" Nov 24 01:25:53 crc kubenswrapper[4755]: E1124 01:25:53.223399 4755 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Nov 24 01:25:53 crc kubenswrapper[4755]: E1124 01:25:53.223474 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f013c70d-8c89-40f5-a132-393403d297c2-cert podName:f013c70d-8c89-40f5-a132-393403d297c2 nodeName:}" failed. No retries permitted until 2025-11-24 01:25:53.723446972 +0000 UTC m=+778.409512473 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/f013c70d-8c89-40f5-a132-393403d297c2-cert") pod "openstack-baremetal-operator-controller-manager-8c7444f48-rbmhv" (UID: "f013c70d-8c89-40f5-a132-393403d297c2") : secret "openstack-baremetal-operator-webhook-server-cert" not found Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.243193 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-d656998f4-kbj7b"] Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.245643 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-d656998f4-kbj7b" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.281589 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g9bj9\" (UniqueName: \"kubernetes.io/projected/3c770fe2-ea89-4ba8-b4f0-95a4f310ea65-kube-api-access-g9bj9\") pod \"octavia-operator-controller-manager-54cfbf4c7d-7t6ft\" (UID: \"3c770fe2-ea89-4ba8-b4f0-95a4f310ea65\") " pod="openstack-operators/octavia-operator-controller-manager-54cfbf4c7d-7t6ft" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.285330 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dm6jh\" (UniqueName: \"kubernetes.io/projected/1a7c3ac2-1c0f-474e-837c-b80226975978-kube-api-access-dm6jh\") pod \"mariadb-operator-controller-manager-54b5986bb8-gqlfb\" (UID: \"1a7c3ac2-1c0f-474e-837c-b80226975978\") " pod="openstack-operators/mariadb-operator-controller-manager-54b5986bb8-gqlfb" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.285795 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8rjx7\" (UniqueName: \"kubernetes.io/projected/9b49aa48-d2db-40a6-9f6f-6b8e5bd3cd07-kube-api-access-8rjx7\") pod \"nova-operator-controller-manager-cfbb9c588-prcbz\" (UID: \"9b49aa48-d2db-40a6-9f6f-6b8e5bd3cd07\") " pod="openstack-operators/nova-operator-controller-manager-cfbb9c588-prcbz" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.286172 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8gzgn" event={"ID":"e8c1a228-1510-48be-bb0a-3d5dea46aa5e","Type":"ContainerStarted","Data":"ef9f046338e7c014242d0e23d17ed0b91c51cf306b7085502606d2ef7c362d11"} Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.287783 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ht287\" (UniqueName: \"kubernetes.io/projected/73185acc-71f3-452e-8454-ebad97b6c6ad-kube-api-access-ht287\") pod \"ovn-operator-controller-manager-54fc5f65b7-b94wx\" (UID: \"73185acc-71f3-452e-8454-ebad97b6c6ad\") " pod="openstack-operators/ovn-operator-controller-manager-54fc5f65b7-b94wx" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.289440 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-zk99z" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.293344 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kkw4t\" (UniqueName: \"kubernetes.io/projected/33d6bfe7-943b-4a59-bfdd-e240b869163d-kube-api-access-kkw4t\") pod \"manila-operator-controller-manager-58f887965d-2rnh9\" (UID: \"33d6bfe7-943b-4a59-bfdd-e240b869163d\") " pod="openstack-operators/manila-operator-controller-manager-58f887965d-2rnh9" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.298202 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z95f7\" (UniqueName: \"kubernetes.io/projected/9e78d1d5-6ae4-4fc3-9edf-77e9f331bf19-kube-api-access-z95f7\") pod \"neutron-operator-controller-manager-78bd47f458-pcp8q\" (UID: \"9e78d1d5-6ae4-4fc3-9edf-77e9f331bf19\") " pod="openstack-operators/neutron-operator-controller-manager-78bd47f458-pcp8q" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.298447 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qfkkn\" (UniqueName: 
\"kubernetes.io/projected/f013c70d-8c89-40f5-a132-393403d297c2-kube-api-access-qfkkn\") pod \"openstack-baremetal-operator-controller-manager-8c7444f48-rbmhv\" (UID: \"f013c70d-8c89-40f5-a132-393403d297c2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-8c7444f48-rbmhv" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.299957 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7454b96578-5622b" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.300763 4755 generic.go:334] "Generic (PLEG): container finished" podID="7de87f2f-d51b-476a-a3e2-570b6dadedb6" containerID="09b0a5a54ba499ea1f7821a9bbe73678f47da834da761ec5b9fbae70778f73a6" exitCode=0 Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.300805 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-65p2h" event={"ID":"7de87f2f-d51b-476a-a3e2-570b6dadedb6","Type":"ContainerDied","Data":"09b0a5a54ba499ea1f7821a9bbe73678f47da834da761ec5b9fbae70778f73a6"} Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.309518 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-5b797b8dff-s22mt"] Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.325457 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v9x68\" (UniqueName: \"kubernetes.io/projected/eb1590a5-3843-4540-ac41-bdfe49ae6569-kube-api-access-v9x68\") pod \"placement-operator-controller-manager-5b797b8dff-s22mt\" (UID: \"eb1590a5-3843-4540-ac41-bdfe49ae6569\") " pod="openstack-operators/placement-operator-controller-manager-5b797b8dff-s22mt" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.338954 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-58f887965d-2rnh9" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.348677 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-d656998f4-kbj7b"] Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.352920 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-6d4bf84b58-7pmzf"] Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.354242 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-6d4bf84b58-7pmzf" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.358862 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-54b5986bb8-gqlfb" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.382727 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-cz5r4" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.383196 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-6d4bf84b58-7pmzf"] Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.394231 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-78bd47f458-pcp8q" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.396668 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-b4c496f69-n9dqh"] Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.398104 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-b4c496f69-n9dqh" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.401152 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-fpnnk" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.427683 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-b4c496f69-n9dqh"] Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.428751 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vzmlx\" (UniqueName: \"kubernetes.io/projected/62e235a5-7928-4e26-9948-a3d2a829ef23-kube-api-access-vzmlx\") pod \"swift-operator-controller-manager-d656998f4-kbj7b\" (UID: \"62e235a5-7928-4e26-9948-a3d2a829ef23\") " pod="openstack-operators/swift-operator-controller-manager-d656998f4-kbj7b" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.428875 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v9x68\" (UniqueName: \"kubernetes.io/projected/eb1590a5-3843-4540-ac41-bdfe49ae6569-kube-api-access-v9x68\") pod \"placement-operator-controller-manager-5b797b8dff-s22mt\" (UID: \"eb1590a5-3843-4540-ac41-bdfe49ae6569\") " pod="openstack-operators/placement-operator-controller-manager-5b797b8dff-s22mt" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.452764 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-8c6448b9f-j56hj"] Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.453839 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-8c6448b9f-j56hj" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.461066 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-97jv9" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.461992 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-8c6448b9f-j56hj"] Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.475228 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v9x68\" (UniqueName: \"kubernetes.io/projected/eb1590a5-3843-4540-ac41-bdfe49ae6569-kube-api-access-v9x68\") pod \"placement-operator-controller-manager-5b797b8dff-s22mt\" (UID: \"eb1590a5-3843-4540-ac41-bdfe49ae6569\") " pod="openstack-operators/placement-operator-controller-manager-5b797b8dff-s22mt" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.476852 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-54cfbf4c7d-7t6ft" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.531194 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vzmlx\" (UniqueName: \"kubernetes.io/projected/62e235a5-7928-4e26-9948-a3d2a829ef23-kube-api-access-vzmlx\") pod \"swift-operator-controller-manager-d656998f4-kbj7b\" (UID: \"62e235a5-7928-4e26-9948-a3d2a829ef23\") " pod="openstack-operators/swift-operator-controller-manager-d656998f4-kbj7b" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.531256 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j564v\" (UniqueName: \"kubernetes.io/projected/25b69b88-4612-4183-a978-b9dd58502d37-kube-api-access-j564v\") pod \"telemetry-operator-controller-manager-6d4bf84b58-7pmzf\" (UID: \"25b69b88-4612-4183-a978-b9dd58502d37\") " pod="openstack-operators/telemetry-operator-controller-manager-6d4bf84b58-7pmzf" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.531277 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dct8w\" (UniqueName: \"kubernetes.io/projected/8914c196-79e5-456c-9a42-1f4464f8dbf8-kube-api-access-dct8w\") pod \"test-operator-controller-manager-b4c496f69-n9dqh\" (UID: \"8914c196-79e5-456c-9a42-1f4464f8dbf8\") " pod="openstack-operators/test-operator-controller-manager-b4c496f69-n9dqh" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.531710 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fd91f6d6-1cc9-4350-a22a-b3859073f6e0-cert\") pod \"infra-operator-controller-manager-6dd8864d7c-zsbst\" (UID: \"fd91f6d6-1cc9-4350-a22a-b3859073f6e0\") " pod="openstack-operators/infra-operator-controller-manager-6dd8864d7c-zsbst" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.558089 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fd91f6d6-1cc9-4350-a22a-b3859073f6e0-cert\") pod \"infra-operator-controller-manager-6dd8864d7c-zsbst\" (UID: \"fd91f6d6-1cc9-4350-a22a-b3859073f6e0\") " pod="openstack-operators/infra-operator-controller-manager-6dd8864d7c-zsbst" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.582911 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-cfbb9c588-prcbz" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.591867 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-68786bb554-2pljv"] Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.592239 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vzmlx\" (UniqueName: \"kubernetes.io/projected/62e235a5-7928-4e26-9948-a3d2a829ef23-kube-api-access-vzmlx\") pod \"swift-operator-controller-manager-d656998f4-kbj7b\" (UID: \"62e235a5-7928-4e26-9948-a3d2a829ef23\") " pod="openstack-operators/swift-operator-controller-manager-d656998f4-kbj7b" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.592456 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-54fc5f65b7-b94wx" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.593311 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-68786bb554-2pljv" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.599944 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.600155 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-lrdsl" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.615192 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-68786bb554-2pljv"] Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.638979 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-g2b7d"] Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.639872 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-g2b7d" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.652586 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-4qwr4" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.653538 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-664d2\" (UniqueName: \"kubernetes.io/projected/2107694a-19fc-40cd-9ef2-b8b60b8b88e2-kube-api-access-664d2\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-g2b7d\" (UID: \"2107694a-19fc-40cd-9ef2-b8b60b8b88e2\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-g2b7d" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.653581 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hpvb7\" (UniqueName: \"kubernetes.io/projected/03ab2bfa-29d5-408b-8d69-54b8b367be23-kube-api-access-hpvb7\") pod \"watcher-operator-controller-manager-8c6448b9f-j56hj\" (UID: \"03ab2bfa-29d5-408b-8d69-54b8b367be23\") " pod="openstack-operators/watcher-operator-controller-manager-8c6448b9f-j56hj" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.653649 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j564v\" (UniqueName: \"kubernetes.io/projected/25b69b88-4612-4183-a978-b9dd58502d37-kube-api-access-j564v\") pod \"telemetry-operator-controller-manager-6d4bf84b58-7pmzf\" (UID: \"25b69b88-4612-4183-a978-b9dd58502d37\") " pod="openstack-operators/telemetry-operator-controller-manager-6d4bf84b58-7pmzf" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.653668 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dct8w\" (UniqueName: \"kubernetes.io/projected/8914c196-79e5-456c-9a42-1f4464f8dbf8-kube-api-access-dct8w\") pod \"test-operator-controller-manager-b4c496f69-n9dqh\" (UID: \"8914c196-79e5-456c-9a42-1f4464f8dbf8\") " pod="openstack-operators/test-operator-controller-manager-b4c496f69-n9dqh" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.654203 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-5b797b8dff-s22mt" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.665501 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-d656998f4-kbj7b" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.679285 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dct8w\" (UniqueName: \"kubernetes.io/projected/8914c196-79e5-456c-9a42-1f4464f8dbf8-kube-api-access-dct8w\") pod \"test-operator-controller-manager-b4c496f69-n9dqh\" (UID: \"8914c196-79e5-456c-9a42-1f4464f8dbf8\") " pod="openstack-operators/test-operator-controller-manager-b4c496f69-n9dqh" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.686668 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j564v\" (UniqueName: \"kubernetes.io/projected/25b69b88-4612-4183-a978-b9dd58502d37-kube-api-access-j564v\") pod \"telemetry-operator-controller-manager-6d4bf84b58-7pmzf\" (UID: \"25b69b88-4612-4183-a978-b9dd58502d37\") " pod="openstack-operators/telemetry-operator-controller-manager-6d4bf84b58-7pmzf" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.688075 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-6d4bf84b58-7pmzf" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.690825 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-g2b7d"] Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.704184 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-b4c496f69-n9dqh" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.755664 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8695f181-2de4-4fa8-b952-8208ab710b94-cert\") pod \"openstack-operator-controller-manager-68786bb554-2pljv\" (UID: \"8695f181-2de4-4fa8-b952-8208ab710b94\") " pod="openstack-operators/openstack-operator-controller-manager-68786bb554-2pljv" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.755715 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hpvb7\" (UniqueName: \"kubernetes.io/projected/03ab2bfa-29d5-408b-8d69-54b8b367be23-kube-api-access-hpvb7\") pod \"watcher-operator-controller-manager-8c6448b9f-j56hj\" (UID: \"03ab2bfa-29d5-408b-8d69-54b8b367be23\") " pod="openstack-operators/watcher-operator-controller-manager-8c6448b9f-j56hj" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.755734 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-664d2\" (UniqueName: \"kubernetes.io/projected/2107694a-19fc-40cd-9ef2-b8b60b8b88e2-kube-api-access-664d2\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-g2b7d\" (UID: \"2107694a-19fc-40cd-9ef2-b8b60b8b88e2\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-g2b7d" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.755777 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gljbw\" (UniqueName: \"kubernetes.io/projected/8695f181-2de4-4fa8-b952-8208ab710b94-kube-api-access-gljbw\") pod \"openstack-operator-controller-manager-68786bb554-2pljv\" (UID: \"8695f181-2de4-4fa8-b952-8208ab710b94\") " pod="openstack-operators/openstack-operator-controller-manager-68786bb554-2pljv" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.755850 4755 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f013c70d-8c89-40f5-a132-393403d297c2-cert\") pod \"openstack-baremetal-operator-controller-manager-8c7444f48-rbmhv\" (UID: \"f013c70d-8c89-40f5-a132-393403d297c2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-8c7444f48-rbmhv" Nov 24 01:25:53 crc kubenswrapper[4755]: E1124 01:25:53.755985 4755 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Nov 24 01:25:53 crc kubenswrapper[4755]: E1124 01:25:53.756054 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f013c70d-8c89-40f5-a132-393403d297c2-cert podName:f013c70d-8c89-40f5-a132-393403d297c2 nodeName:}" failed. No retries permitted until 2025-11-24 01:25:54.756020663 +0000 UTC m=+779.442086164 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/f013c70d-8c89-40f5-a132-393403d297c2-cert") pod "openstack-baremetal-operator-controller-manager-8c7444f48-rbmhv" (UID: "f013c70d-8c89-40f5-a132-393403d297c2") : secret "openstack-baremetal-operator-webhook-server-cert" not found Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.779968 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hpvb7\" (UniqueName: \"kubernetes.io/projected/03ab2bfa-29d5-408b-8d69-54b8b367be23-kube-api-access-hpvb7\") pod \"watcher-operator-controller-manager-8c6448b9f-j56hj\" (UID: \"03ab2bfa-29d5-408b-8d69-54b8b367be23\") " pod="openstack-operators/watcher-operator-controller-manager-8c6448b9f-j56hj" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.782225 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-664d2\" (UniqueName: \"kubernetes.io/projected/2107694a-19fc-40cd-9ef2-b8b60b8b88e2-kube-api-access-664d2\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-g2b7d\" (UID: \"2107694a-19fc-40cd-9ef2-b8b60b8b88e2\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-g2b7d" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.790650 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-6dd8864d7c-zsbst" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.857011 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gljbw\" (UniqueName: \"kubernetes.io/projected/8695f181-2de4-4fa8-b952-8208ab710b94-kube-api-access-gljbw\") pod \"openstack-operator-controller-manager-68786bb554-2pljv\" (UID: \"8695f181-2de4-4fa8-b952-8208ab710b94\") " pod="openstack-operators/openstack-operator-controller-manager-68786bb554-2pljv" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.857109 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8695f181-2de4-4fa8-b952-8208ab710b94-cert\") pod \"openstack-operator-controller-manager-68786bb554-2pljv\" (UID: \"8695f181-2de4-4fa8-b952-8208ab710b94\") " pod="openstack-operators/openstack-operator-controller-manager-68786bb554-2pljv" Nov 24 01:25:53 crc kubenswrapper[4755]: E1124 01:25:53.858115 4755 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Nov 24 01:25:53 crc kubenswrapper[4755]: E1124 01:25:53.858152 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8695f181-2de4-4fa8-b952-8208ab710b94-cert podName:8695f181-2de4-4fa8-b952-8208ab710b94 nodeName:}" failed. No retries permitted until 2025-11-24 01:25:54.358139126 +0000 UTC m=+779.044204627 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/8695f181-2de4-4fa8-b952-8208ab710b94-cert") pod "openstack-operator-controller-manager-68786bb554-2pljv" (UID: "8695f181-2de4-4fa8-b952-8208ab710b94") : secret "webhook-server-cert" not found Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.875780 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gljbw\" (UniqueName: \"kubernetes.io/projected/8695f181-2de4-4fa8-b952-8208ab710b94-kube-api-access-gljbw\") pod \"openstack-operator-controller-manager-68786bb554-2pljv\" (UID: \"8695f181-2de4-4fa8-b952-8208ab710b94\") " pod="openstack-operators/openstack-operator-controller-manager-68786bb554-2pljv" Nov 24 01:25:53 crc kubenswrapper[4755]: I1124 01:25:53.933509 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-767ccfd65f-fdwk5"] Nov 24 01:25:53 crc kubenswrapper[4755]: W1124 01:25:53.992743 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode84b7100_14c9_436c_97e5_d14c2455b42a.slice/crio-5c0bf41f08ac7d8b17989b8f90ee4def33f8797f9b339d2ec352169c1f84574a WatchSource:0}: Error finding container 5c0bf41f08ac7d8b17989b8f90ee4def33f8797f9b339d2ec352169c1f84574a: Status 404 returned error can't find the container with id 5c0bf41f08ac7d8b17989b8f90ee4def33f8797f9b339d2ec352169c1f84574a Nov 24 01:25:54 crc kubenswrapper[4755]: I1124 01:25:54.033529 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-8c6448b9f-j56hj" Nov 24 01:25:54 crc kubenswrapper[4755]: I1124 01:25:54.057350 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-g2b7d" Nov 24 01:25:54 crc kubenswrapper[4755]: I1124 01:25:54.090533 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-6498cbf48f-hh7kh"] Nov 24 01:25:54 crc kubenswrapper[4755]: W1124 01:25:54.148672 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfc04cdee_f1bd_4d40_9c1c_02f4e9661851.slice/crio-db940b147252d4dd4c8e6eb25c8081271fa13ea99e1b11075af5ab969906ddde WatchSource:0}: Error finding container db940b147252d4dd4c8e6eb25c8081271fa13ea99e1b11075af5ab969906ddde: Status 404 returned error can't find the container with id db940b147252d4dd4c8e6eb25c8081271fa13ea99e1b11075af5ab969906ddde Nov 24 01:25:54 crc kubenswrapper[4755]: W1124 01:25:54.248706 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod33d6bfe7_943b_4a59_bfdd_e240b869163d.slice/crio-c96f2e550579029f3395155aa4c31412791bf101e030f64562e9e464f5b97fd5 WatchSource:0}: Error finding container c96f2e550579029f3395155aa4c31412791bf101e030f64562e9e464f5b97fd5: Status 404 returned error can't find the container with id c96f2e550579029f3395155aa4c31412791bf101e030f64562e9e464f5b97fd5 Nov 24 01:25:54 crc kubenswrapper[4755]: I1124 01:25:54.248755 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-58f887965d-2rnh9"] Nov 24 01:25:54 crc kubenswrapper[4755]: W1124 01:25:54.278305 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1660a0eb_228b_41bc_a360_a71fec20d415.slice/crio-17139c3ffb063f7addc07bb75a697c85f62cd6af5bc66b3556bf28e1405b8a06 WatchSource:0}: Error finding container 17139c3ffb063f7addc07bb75a697c85f62cd6af5bc66b3556bf28e1405b8a06: Status 404 returned error can't find the container with id 17139c3ffb063f7addc07bb75a697c85f62cd6af5bc66b3556bf28e1405b8a06 Nov 24 01:25:54 crc kubenswrapper[4755]: I1124 01:25:54.279748 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-54b5986bb8-gqlfb"] Nov 24 01:25:54 crc kubenswrapper[4755]: I1124 01:25:54.312844 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-6498cbf48f-hh7kh" event={"ID":"fc04cdee-f1bd-4d40-9c1c-02f4e9661851","Type":"ContainerStarted","Data":"db940b147252d4dd4c8e6eb25c8081271fa13ea99e1b11075af5ab969906ddde"} Nov 24 01:25:54 crc kubenswrapper[4755]: I1124 01:25:54.318964 4755 generic.go:334] "Generic (PLEG): container finished" podID="e8c1a228-1510-48be-bb0a-3d5dea46aa5e" containerID="ef9f046338e7c014242d0e23d17ed0b91c51cf306b7085502606d2ef7c362d11" exitCode=0 Nov 24 01:25:54 crc kubenswrapper[4755]: I1124 01:25:54.319026 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8gzgn" event={"ID":"e8c1a228-1510-48be-bb0a-3d5dea46aa5e","Type":"ContainerDied","Data":"ef9f046338e7c014242d0e23d17ed0b91c51cf306b7085502606d2ef7c362d11"} Nov 24 01:25:54 crc kubenswrapper[4755]: I1124 01:25:54.324394 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-75fb479bcc-b297l"] Nov 24 01:25:54 crc kubenswrapper[4755]: I1124 01:25:54.334974 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/mariadb-operator-controller-manager-54b5986bb8-gqlfb" event={"ID":"1a7c3ac2-1c0f-474e-837c-b80226975978","Type":"ContainerStarted","Data":"77d464f8544527fda59882a13fe49b0cd247745db269d5b058b60e02eb20c933"} Nov 24 01:25:54 crc kubenswrapper[4755]: I1124 01:25:54.342918 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-75fb479bcc-b297l" event={"ID":"1660a0eb-228b-41bc-a360-a71fec20d415","Type":"ContainerStarted","Data":"17139c3ffb063f7addc07bb75a697c85f62cd6af5bc66b3556bf28e1405b8a06"} Nov 24 01:25:54 crc kubenswrapper[4755]: I1124 01:25:54.346461 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-58f887965d-2rnh9" event={"ID":"33d6bfe7-943b-4a59-bfdd-e240b869163d","Type":"ContainerStarted","Data":"c96f2e550579029f3395155aa4c31412791bf101e030f64562e9e464f5b97fd5"} Nov 24 01:25:54 crc kubenswrapper[4755]: I1124 01:25:54.348399 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-767ccfd65f-fdwk5" event={"ID":"e84b7100-14c9-436c-97e5-d14c2455b42a","Type":"ContainerStarted","Data":"5c0bf41f08ac7d8b17989b8f90ee4def33f8797f9b339d2ec352169c1f84574a"} Nov 24 01:25:54 crc kubenswrapper[4755]: I1124 01:25:54.370468 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8695f181-2de4-4fa8-b952-8208ab710b94-cert\") pod \"openstack-operator-controller-manager-68786bb554-2pljv\" (UID: \"8695f181-2de4-4fa8-b952-8208ab710b94\") " pod="openstack-operators/openstack-operator-controller-manager-68786bb554-2pljv" Nov 24 01:25:54 crc kubenswrapper[4755]: I1124 01:25:54.378256 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8695f181-2de4-4fa8-b952-8208ab710b94-cert\") pod \"openstack-operator-controller-manager-68786bb554-2pljv\" (UID: \"8695f181-2de4-4fa8-b952-8208ab710b94\") " pod="openstack-operators/openstack-operator-controller-manager-68786bb554-2pljv" Nov 24 01:25:54 crc kubenswrapper[4755]: I1124 01:25:54.432708 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-54fc5f65b7-b94wx"] Nov 24 01:25:54 crc kubenswrapper[4755]: I1124 01:25:54.439571 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-598f69df5d-97znt"] Nov 24 01:25:54 crc kubenswrapper[4755]: W1124 01:25:54.441301 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod73185acc_71f3_452e_8454_ebad97b6c6ad.slice/crio-48e0c7084d2a1b48e61a8fed71301b2f340977cdfc716a2eeb0dd69ef9a6c7d7 WatchSource:0}: Error finding container 48e0c7084d2a1b48e61a8fed71301b2f340977cdfc716a2eeb0dd69ef9a6c7d7: Status 404 returned error can't find the container with id 48e0c7084d2a1b48e61a8fed71301b2f340977cdfc716a2eeb0dd69ef9a6c7d7 Nov 24 01:25:54 crc kubenswrapper[4755]: W1124 01:25:54.446475 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod77153df1_136d_456e_a6e0_817b2f633d3e.slice/crio-56e6ff9205c8983cab32ae17b7853fee1f455f4b4efa63b5bf031aeaae07d08d WatchSource:0}: Error finding container 56e6ff9205c8983cab32ae17b7853fee1f455f4b4efa63b5bf031aeaae07d08d: Status 404 returned error can't find the container with id 
56e6ff9205c8983cab32ae17b7853fee1f455f4b4efa63b5bf031aeaae07d08d Nov 24 01:25:54 crc kubenswrapper[4755]: I1124 01:25:54.464319 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-56f54d6746-45h29"] Nov 24 01:25:54 crc kubenswrapper[4755]: I1124 01:25:54.471555 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7454b96578-5622b"] Nov 24 01:25:54 crc kubenswrapper[4755]: I1124 01:25:54.633527 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-68786bb554-2pljv" Nov 24 01:25:54 crc kubenswrapper[4755]: I1124 01:25:54.664949 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-99b499f4-p9wgw"] Nov 24 01:25:54 crc kubenswrapper[4755]: W1124 01:25:54.674821 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf443bd2d_3e36_44eb_9684_8ec505b8bea7.slice/crio-dbe4d89d836accbcec04b08708eed8aa051d1b82fea0ab99186ab34eaccb545e WatchSource:0}: Error finding container dbe4d89d836accbcec04b08708eed8aa051d1b82fea0ab99186ab34eaccb545e: Status 404 returned error can't find the container with id dbe4d89d836accbcec04b08708eed8aa051d1b82fea0ab99186ab34eaccb545e Nov 24 01:25:54 crc kubenswrapper[4755]: I1124 01:25:54.686143 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-54cfbf4c7d-7t6ft"] Nov 24 01:25:54 crc kubenswrapper[4755]: I1124 01:25:54.690518 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-7969689c84-bnmvb"] Nov 24 01:25:54 crc kubenswrapper[4755]: I1124 01:25:54.696158 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-5b797b8dff-s22mt"] Nov 24 01:25:54 crc kubenswrapper[4755]: E1124 01:25:54.722719 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:4094e7fc11a33e8e2b6768a053cafaf5b122446d23f9113d43d520cb64e9776c,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-v9x68,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-5b797b8dff-s22mt_openstack-operators(eb1590a5-3843-4540-ac41-bdfe49ae6569): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 24 01:25:54 crc kubenswrapper[4755]: I1124 01:25:54.784533 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f013c70d-8c89-40f5-a132-393403d297c2-cert\") pod \"openstack-baremetal-operator-controller-manager-8c7444f48-rbmhv\" (UID: \"f013c70d-8c89-40f5-a132-393403d297c2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-8c7444f48-rbmhv" Nov 24 01:25:54 crc kubenswrapper[4755]: I1124 01:25:54.789841 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f013c70d-8c89-40f5-a132-393403d297c2-cert\") pod \"openstack-baremetal-operator-controller-manager-8c7444f48-rbmhv\" (UID: \"f013c70d-8c89-40f5-a132-393403d297c2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-8c7444f48-rbmhv" Nov 24 01:25:54 crc kubenswrapper[4755]: I1124 01:25:54.858397 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-78bd47f458-pcp8q"] Nov 24 01:25:54 crc kubenswrapper[4755]: I1124 01:25:54.885081 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-cfbb9c588-prcbz"] Nov 24 01:25:54 crc kubenswrapper[4755]: E1124 01:25:54.914726 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/placement-operator-controller-manager-5b797b8dff-s22mt" podUID="eb1590a5-3843-4540-ac41-bdfe49ae6569" Nov 24 01:25:54 crc kubenswrapper[4755]: E1124 01:25:54.918899 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:c053e34316044f14929e16e4f0d97f9f1b24cb68b5e22b925ca74c66aaaed0a7,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-8rjx7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-cfbb9c588-prcbz_openstack-operators(9b49aa48-d2db-40a6-9f6f-6b8e5bd3cd07): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 24 01:25:55 crc kubenswrapper[4755]: I1124 01:25:55.058935 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-8c7444f48-rbmhv" Nov 24 01:25:55 crc kubenswrapper[4755]: E1124 01:25:55.114882 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/nova-operator-controller-manager-cfbb9c588-prcbz" podUID="9b49aa48-d2db-40a6-9f6f-6b8e5bd3cd07" Nov 24 01:25:55 crc kubenswrapper[4755]: I1124 01:25:55.144962 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-6d4bf84b58-7pmzf"] Nov 24 01:25:55 crc kubenswrapper[4755]: I1124 01:25:55.152874 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-b4c496f69-n9dqh"] Nov 24 01:25:55 crc kubenswrapper[4755]: W1124 01:25:55.159737 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod25b69b88_4612_4183_a978_b9dd58502d37.slice/crio-9a8bca98139d97e75b9d903e739321b08c97c5a590ed5f89d14e284f6cbc37d0 WatchSource:0}: Error finding container 9a8bca98139d97e75b9d903e739321b08c97c5a590ed5f89d14e284f6cbc37d0: Status 404 returned error can't find the container with id 9a8bca98139d97e75b9d903e739321b08c97c5a590ed5f89d14e284f6cbc37d0 Nov 24 01:25:55 crc kubenswrapper[4755]: I1124 01:25:55.161160 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-8c6448b9f-j56hj"] Nov 24 01:25:55 crc kubenswrapper[4755]: I1124 01:25:55.170650 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-d656998f4-kbj7b"] Nov 24 01:25:55 crc 
kubenswrapper[4755]: W1124 01:25:55.172723 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8914c196_79e5_456c_9a42_1f4464f8dbf8.slice/crio-6c5b995ee85be11d05ea5eda045a356d31daf4f9976fa7af37d81679ad44e5dc WatchSource:0}: Error finding container 6c5b995ee85be11d05ea5eda045a356d31daf4f9976fa7af37d81679ad44e5dc: Status 404 returned error can't find the container with id 6c5b995ee85be11d05ea5eda045a356d31daf4f9976fa7af37d81679ad44e5dc Nov 24 01:25:55 crc kubenswrapper[4755]: I1124 01:25:55.175133 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-g2b7d"] Nov 24 01:25:55 crc kubenswrapper[4755]: I1124 01:25:55.179176 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-6dd8864d7c-zsbst"] Nov 24 01:25:55 crc kubenswrapper[4755]: I1124 01:25:55.226205 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-68786bb554-2pljv"] Nov 24 01:25:55 crc kubenswrapper[4755]: E1124 01:25:55.252062 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:4838402d41d42c56613d43dc5041aae475a2b18e6172491d6c4d4a78a580697f,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-hpvb7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-8c6448b9f-j56hj_openstack-operators(03ab2bfa-29d5-408b-8d69-54b8b367be23): 
ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 24 01:25:55 crc kubenswrapper[4755]: W1124 01:25:55.257325 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2107694a_19fc_40cd_9ef2_b8b60b8b88e2.slice/crio-ed8ce6dfe3985731df2f81fb1b7bd045b84566f9cae627edbe334cac1b900e58 WatchSource:0}: Error finding container ed8ce6dfe3985731df2f81fb1b7bd045b84566f9cae627edbe334cac1b900e58: Status 404 returned error can't find the container with id ed8ce6dfe3985731df2f81fb1b7bd045b84566f9cae627edbe334cac1b900e58 Nov 24 01:25:55 crc kubenswrapper[4755]: E1124 01:25:55.259990 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/infra-operator@sha256:86df58f744c1d23233cc98f6ea17c8d6da637c50003d0fc8c100045594aa9894,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{600 -3} {} 600m DecimalSI},memory: {{2147483648 0} {} 2Gi BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{536870912 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-j5wrh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod infra-operator-controller-manager-6dd8864d7c-zsbst_openstack-operators(fd91f6d6-1cc9-4350-a22a-b3859073f6e0): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 24 01:25:55 crc kubenswrapper[4755]: E1124 01:25:55.266251 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-664d2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-5f97d8c699-g2b7d_openstack-operators(2107694a-19fc-40cd-9ef2-b8b60b8b88e2): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 24 01:25:55 crc kubenswrapper[4755]: E1124 01:25:55.267995 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-g2b7d" podUID="2107694a-19fc-40cd-9ef2-b8b60b8b88e2" Nov 24 01:25:55 crc kubenswrapper[4755]: W1124 01:25:55.285740 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod62e235a5_7928_4e26_9948_a3d2a829ef23.slice/crio-d5e7da20e7bfe3260e6e6a86cac6e45a95693677087bebae2ee200af85d0a9f0 WatchSource:0}: Error finding container d5e7da20e7bfe3260e6e6a86cac6e45a95693677087bebae2ee200af85d0a9f0: Status 404 returned error can't find the container with id d5e7da20e7bfe3260e6e6a86cac6e45a95693677087bebae2ee200af85d0a9f0 Nov 24 01:25:55 crc kubenswrapper[4755]: E1124 01:25:55.302175 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:c0b5f124a37c1538042c0e63f0978429572e2a851d7f3a6eb80de09b86d755a0,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 
--leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-vzmlx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-d656998f4-kbj7b_openstack-operators(62e235a5-7928-4e26-9948-a3d2a829ef23): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 24 01:25:55 crc kubenswrapper[4755]: W1124 01:25:55.306665 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8695f181_2de4_4fa8_b952_8208ab710b94.slice/crio-e34d92d95e65757b6165308665e54c3b8a1972dd7ee8604729118a5b48342b6c WatchSource:0}: Error finding container e34d92d95e65757b6165308665e54c3b8a1972dd7ee8604729118a5b48342b6c: Status 404 returned error can't find the container with id e34d92d95e65757b6165308665e54c3b8a1972dd7ee8604729118a5b48342b6c Nov 24 01:25:55 crc kubenswrapper[4755]: I1124 01:25:55.355203 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-d656998f4-kbj7b" event={"ID":"62e235a5-7928-4e26-9948-a3d2a829ef23","Type":"ContainerStarted","Data":"d5e7da20e7bfe3260e6e6a86cac6e45a95693677087bebae2ee200af85d0a9f0"} Nov 24 01:25:55 crc kubenswrapper[4755]: I1124 01:25:55.357159 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-6d4bf84b58-7pmzf" event={"ID":"25b69b88-4612-4183-a978-b9dd58502d37","Type":"ContainerStarted","Data":"9a8bca98139d97e75b9d903e739321b08c97c5a590ed5f89d14e284f6cbc37d0"} Nov 24 01:25:55 crc kubenswrapper[4755]: I1124 01:25:55.361180 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-b4c496f69-n9dqh" 
event={"ID":"8914c196-79e5-456c-9a42-1f4464f8dbf8","Type":"ContainerStarted","Data":"6c5b995ee85be11d05ea5eda045a356d31daf4f9976fa7af37d81679ad44e5dc"} Nov 24 01:25:55 crc kubenswrapper[4755]: I1124 01:25:55.363374 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-78bd47f458-pcp8q" event={"ID":"9e78d1d5-6ae4-4fc3-9edf-77e9f331bf19","Type":"ContainerStarted","Data":"6a8a273ef86d7288e50ad559c19800f15068d2609af46e4c37fc7bc38599d981"} Nov 24 01:25:55 crc kubenswrapper[4755]: I1124 01:25:55.364939 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-56f54d6746-45h29" event={"ID":"22780566-edb3-47e3-b3ea-a42def0f4460","Type":"ContainerStarted","Data":"7b40b7f381015ac03eec531c69f57cf11f82ccaa67bf37d29f7aac026cca07fe"} Nov 24 01:25:55 crc kubenswrapper[4755]: I1124 01:25:55.367128 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-7969689c84-bnmvb" event={"ID":"a22ef49d-f887-41f4-ad37-6b1b0bf7a748","Type":"ContainerStarted","Data":"99db6d04b1d81fe4d4cb793509b9c7e89924565964de6661bee893e75489ce6d"} Nov 24 01:25:55 crc kubenswrapper[4755]: I1124 01:25:55.369199 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-cfbb9c588-prcbz" event={"ID":"9b49aa48-d2db-40a6-9f6f-6b8e5bd3cd07","Type":"ContainerStarted","Data":"36a1e06722c89da8197e0afd6da31ee847d41e1f36bebe9a09b4b53606cb1c3e"} Nov 24 01:25:55 crc kubenswrapper[4755]: I1124 01:25:55.369281 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-cfbb9c588-prcbz" event={"ID":"9b49aa48-d2db-40a6-9f6f-6b8e5bd3cd07","Type":"ContainerStarted","Data":"e62ba03283ba3a1a4694170d27c5cf370914e1333f93d346a20c9683182f73f0"} Nov 24 01:25:55 crc kubenswrapper[4755]: E1124 01:25:55.371097 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:c053e34316044f14929e16e4f0d97f9f1b24cb68b5e22b925ca74c66aaaed0a7\\\"\"" pod="openstack-operators/nova-operator-controller-manager-cfbb9c588-prcbz" podUID="9b49aa48-d2db-40a6-9f6f-6b8e5bd3cd07" Nov 24 01:25:55 crc kubenswrapper[4755]: I1124 01:25:55.374755 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-g2b7d" event={"ID":"2107694a-19fc-40cd-9ef2-b8b60b8b88e2","Type":"ContainerStarted","Data":"ed8ce6dfe3985731df2f81fb1b7bd045b84566f9cae627edbe334cac1b900e58"} Nov 24 01:25:55 crc kubenswrapper[4755]: I1124 01:25:55.378627 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-68786bb554-2pljv" event={"ID":"8695f181-2de4-4fa8-b952-8208ab710b94","Type":"ContainerStarted","Data":"e34d92d95e65757b6165308665e54c3b8a1972dd7ee8604729118a5b48342b6c"} Nov 24 01:25:55 crc kubenswrapper[4755]: I1124 01:25:55.381162 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8gzgn" event={"ID":"e8c1a228-1510-48be-bb0a-3d5dea46aa5e","Type":"ContainerStarted","Data":"0dc24d152e5753121cce9750edfa1029f4a4777ddffcb153fd6fc744e4334d33"} Nov 24 01:25:55 crc kubenswrapper[4755]: I1124 01:25:55.383537 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/infra-operator-controller-manager-6dd8864d7c-zsbst" event={"ID":"fd91f6d6-1cc9-4350-a22a-b3859073f6e0","Type":"ContainerStarted","Data":"22555347b72969aefc9bc73264e550fc748f701b8fa2e9c8101f7fbc649bbe24"} Nov 24 01:25:55 crc kubenswrapper[4755]: I1124 01:25:55.402489 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-5b797b8dff-s22mt" event={"ID":"eb1590a5-3843-4540-ac41-bdfe49ae6569","Type":"ContainerStarted","Data":"3cac185ae2d7716f7f19c2f35981418c9ddcf5ebc921657acb3f4394438d45b4"} Nov 24 01:25:55 crc kubenswrapper[4755]: I1124 01:25:55.402531 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-5b797b8dff-s22mt" event={"ID":"eb1590a5-3843-4540-ac41-bdfe49ae6569","Type":"ContainerStarted","Data":"2e05018374e9e7a7b2d76ed81eaeaedacbebe6e17163f760ee351ae0b8b5f858"} Nov 24 01:25:55 crc kubenswrapper[4755]: E1124 01:25:55.403355 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-g2b7d" podUID="2107694a-19fc-40cd-9ef2-b8b60b8b88e2" Nov 24 01:25:55 crc kubenswrapper[4755]: E1124 01:25:55.410755 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:4094e7fc11a33e8e2b6768a053cafaf5b122446d23f9113d43d520cb64e9776c\\\"\"" pod="openstack-operators/placement-operator-controller-manager-5b797b8dff-s22mt" podUID="eb1590a5-3843-4540-ac41-bdfe49ae6569" Nov 24 01:25:55 crc kubenswrapper[4755]: I1124 01:25:55.417342 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-54fc5f65b7-b94wx" event={"ID":"73185acc-71f3-452e-8454-ebad97b6c6ad","Type":"ContainerStarted","Data":"48e0c7084d2a1b48e61a8fed71301b2f340977cdfc716a2eeb0dd69ef9a6c7d7"} Nov 24 01:25:55 crc kubenswrapper[4755]: I1124 01:25:55.429306 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-8c6448b9f-j56hj" event={"ID":"03ab2bfa-29d5-408b-8d69-54b8b367be23","Type":"ContainerStarted","Data":"acdedb50e78e7efa9cb6a1e9289b3c3a4b07313171f9e17fa552845ab65800b6"} Nov 24 01:25:55 crc kubenswrapper[4755]: I1124 01:25:55.432067 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-598f69df5d-97znt" event={"ID":"77153df1-136d-456e-a6e0-817b2f633d3e","Type":"ContainerStarted","Data":"56e6ff9205c8983cab32ae17b7853fee1f455f4b4efa63b5bf031aeaae07d08d"} Nov 24 01:25:55 crc kubenswrapper[4755]: I1124 01:25:55.433353 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-99b499f4-p9wgw" event={"ID":"f443bd2d-3e36-44eb-9684-8ec505b8bea7","Type":"ContainerStarted","Data":"dbe4d89d836accbcec04b08708eed8aa051d1b82fea0ab99186ab34eaccb545e"} Nov 24 01:25:55 crc kubenswrapper[4755]: I1124 01:25:55.436017 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-65p2h" 
event={"ID":"7de87f2f-d51b-476a-a3e2-570b6dadedb6","Type":"ContainerStarted","Data":"add2d86a3d812332389d282c7ee971e2ad95f7fb2d86162818113253dde5c9dc"} Nov 24 01:25:55 crc kubenswrapper[4755]: I1124 01:25:55.439331 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-54cfbf4c7d-7t6ft" event={"ID":"3c770fe2-ea89-4ba8-b4f0-95a4f310ea65","Type":"ContainerStarted","Data":"e14c3166a1dc1f2a987ff095300b8aecedb26f2b26a652830857e7d1c29cad97"} Nov 24 01:25:55 crc kubenswrapper[4755]: I1124 01:25:55.443408 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7454b96578-5622b" event={"ID":"dd14c3fa-bb96-4795-b339-a506c71b16a2","Type":"ContainerStarted","Data":"e373afa549cc6902de289f79e37ca7e864c3b08450b13f96c801b674908fc505"} Nov 24 01:25:55 crc kubenswrapper[4755]: I1124 01:25:55.484105 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-8gzgn" podStartSLOduration=2.8752875749999998 podStartE2EDuration="5.483646961s" podCreationTimestamp="2025-11-24 01:25:50 +0000 UTC" firstStartedPulling="2025-11-24 01:25:52.173832897 +0000 UTC m=+776.859898398" lastFinishedPulling="2025-11-24 01:25:54.782192263 +0000 UTC m=+779.468257784" observedRunningTime="2025-11-24 01:25:55.478769763 +0000 UTC m=+780.164835274" watchObservedRunningTime="2025-11-24 01:25:55.483646961 +0000 UTC m=+780.169712462" Nov 24 01:25:55 crc kubenswrapper[4755]: I1124 01:25:55.693231 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-65p2h" podStartSLOduration=3.722197014 podStartE2EDuration="6.693207187s" podCreationTimestamp="2025-11-24 01:25:49 +0000 UTC" firstStartedPulling="2025-11-24 01:25:51.16223194 +0000 UTC m=+775.848297441" lastFinishedPulling="2025-11-24 01:25:54.133242113 +0000 UTC m=+778.819307614" observedRunningTime="2025-11-24 01:25:55.561032424 +0000 UTC m=+780.247097945" watchObservedRunningTime="2025-11-24 01:25:55.693207187 +0000 UTC m=+780.379272688" Nov 24 01:25:55 crc kubenswrapper[4755]: I1124 01:25:55.706122 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-8c7444f48-rbmhv"] Nov 24 01:25:55 crc kubenswrapper[4755]: E1124 01:25:55.717333 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/watcher-operator-controller-manager-8c6448b9f-j56hj" podUID="03ab2bfa-29d5-408b-8d69-54b8b367be23" Nov 24 01:25:55 crc kubenswrapper[4755]: E1124 01:25:55.835460 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/infra-operator-controller-manager-6dd8864d7c-zsbst" podUID="fd91f6d6-1cc9-4350-a22a-b3859073f6e0" Nov 24 01:25:55 crc kubenswrapper[4755]: E1124 01:25:55.938098 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/swift-operator-controller-manager-d656998f4-kbj7b" podUID="62e235a5-7928-4e26-9948-a3d2a829ef23" Nov 24 01:25:56 crc kubenswrapper[4755]: I1124 01:25:56.410907 4755 scope.go:117] "RemoveContainer" containerID="7d00c25c1095cb12f7a49b857e83d14eb540b0633b4878a1705c9a295f39dd99" Nov 24 01:25:56 crc kubenswrapper[4755]: 
I1124 01:25:56.469742 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-68786bb554-2pljv" event={"ID":"8695f181-2de4-4fa8-b952-8208ab710b94","Type":"ContainerStarted","Data":"edde01a43a7ddeb0d32fe1f9ed0d2e428e43837be0aee570c5c5ab35f5aeb3d7"} Nov 24 01:25:56 crc kubenswrapper[4755]: I1124 01:25:56.469786 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-68786bb554-2pljv" event={"ID":"8695f181-2de4-4fa8-b952-8208ab710b94","Type":"ContainerStarted","Data":"d04a9a36fabdd5fee64e85dff3cef007880fe383ae13ffcfabd74e7f18dd203b"} Nov 24 01:25:56 crc kubenswrapper[4755]: I1124 01:25:56.470471 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-68786bb554-2pljv" Nov 24 01:25:56 crc kubenswrapper[4755]: I1124 01:25:56.471475 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-8c7444f48-rbmhv" event={"ID":"f013c70d-8c89-40f5-a132-393403d297c2","Type":"ContainerStarted","Data":"1fc1a94af9069e2e272ffe67c218e957f671ce1a31f79704cf1dda56be069339"} Nov 24 01:25:56 crc kubenswrapper[4755]: I1124 01:25:56.474463 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-6dd8864d7c-zsbst" event={"ID":"fd91f6d6-1cc9-4350-a22a-b3859073f6e0","Type":"ContainerStarted","Data":"43be07e081348f9f6a650ca80345cc516fd9353e99bf06a0fee6f91240d7e5dc"} Nov 24 01:25:56 crc kubenswrapper[4755]: I1124 01:25:56.506940 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-8c6448b9f-j56hj" event={"ID":"03ab2bfa-29d5-408b-8d69-54b8b367be23","Type":"ContainerStarted","Data":"de4898c619c0970128c5e6df9ba90d5ae8fc542b5785b7ea9ae54c47c129fb43"} Nov 24 01:25:56 crc kubenswrapper[4755]: E1124 01:25:56.537856 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:4838402d41d42c56613d43dc5041aae475a2b18e6172491d6c4d4a78a580697f\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-8c6448b9f-j56hj" podUID="03ab2bfa-29d5-408b-8d69-54b8b367be23" Nov 24 01:25:56 crc kubenswrapper[4755]: E1124 01:25:56.538181 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:86df58f744c1d23233cc98f6ea17c8d6da637c50003d0fc8c100045594aa9894\\\"\"" pod="openstack-operators/infra-operator-controller-manager-6dd8864d7c-zsbst" podUID="fd91f6d6-1cc9-4350-a22a-b3859073f6e0" Nov 24 01:25:56 crc kubenswrapper[4755]: I1124 01:25:56.568681 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-d656998f4-kbj7b" event={"ID":"62e235a5-7928-4e26-9948-a3d2a829ef23","Type":"ContainerStarted","Data":"35b605766abc8733c88a734a3711ef41c8330ebfa4267bf3995a980fa7a7447b"} Nov 24 01:25:56 crc kubenswrapper[4755]: E1124 01:25:56.573140 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/nova-operator@sha256:c053e34316044f14929e16e4f0d97f9f1b24cb68b5e22b925ca74c66aaaed0a7\\\"\"" pod="openstack-operators/nova-operator-controller-manager-cfbb9c588-prcbz" podUID="9b49aa48-d2db-40a6-9f6f-6b8e5bd3cd07" Nov 24 01:25:56 crc kubenswrapper[4755]: E1124 01:25:56.573237 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:c0b5f124a37c1538042c0e63f0978429572e2a851d7f3a6eb80de09b86d755a0\\\"\"" pod="openstack-operators/swift-operator-controller-manager-d656998f4-kbj7b" podUID="62e235a5-7928-4e26-9948-a3d2a829ef23" Nov 24 01:25:56 crc kubenswrapper[4755]: E1124 01:25:56.581926 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-g2b7d" podUID="2107694a-19fc-40cd-9ef2-b8b60b8b88e2" Nov 24 01:25:56 crc kubenswrapper[4755]: E1124 01:25:56.582648 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:4094e7fc11a33e8e2b6768a053cafaf5b122446d23f9113d43d520cb64e9776c\\\"\"" pod="openstack-operators/placement-operator-controller-manager-5b797b8dff-s22mt" podUID="eb1590a5-3843-4540-ac41-bdfe49ae6569" Nov 24 01:25:56 crc kubenswrapper[4755]: I1124 01:25:56.672622 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-68786bb554-2pljv" podStartSLOduration=3.672586019 podStartE2EDuration="3.672586019s" podCreationTimestamp="2025-11-24 01:25:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:25:56.645417647 +0000 UTC m=+781.331483158" watchObservedRunningTime="2025-11-24 01:25:56.672586019 +0000 UTC m=+781.358651520" Nov 24 01:25:57 crc kubenswrapper[4755]: E1124 01:25:57.583892 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:4838402d41d42c56613d43dc5041aae475a2b18e6172491d6c4d4a78a580697f\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-8c6448b9f-j56hj" podUID="03ab2bfa-29d5-408b-8d69-54b8b367be23" Nov 24 01:25:57 crc kubenswrapper[4755]: E1124 01:25:57.583939 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:86df58f744c1d23233cc98f6ea17c8d6da637c50003d0fc8c100045594aa9894\\\"\"" pod="openstack-operators/infra-operator-controller-manager-6dd8864d7c-zsbst" podUID="fd91f6d6-1cc9-4350-a22a-b3859073f6e0" Nov 24 01:25:57 crc kubenswrapper[4755]: E1124 01:25:57.584821 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/swift-operator@sha256:c0b5f124a37c1538042c0e63f0978429572e2a851d7f3a6eb80de09b86d755a0\\\"\"" pod="openstack-operators/swift-operator-controller-manager-d656998f4-kbj7b" podUID="62e235a5-7928-4e26-9948-a3d2a829ef23" Nov 24 01:25:58 crc kubenswrapper[4755]: I1124 01:25:58.626256 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-mf98s" Nov 24 01:25:58 crc kubenswrapper[4755]: I1124 01:25:58.692196 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-mf98s" Nov 24 01:25:59 crc kubenswrapper[4755]: I1124 01:25:59.951834 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-65p2h" Nov 24 01:25:59 crc kubenswrapper[4755]: I1124 01:25:59.952151 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-65p2h" Nov 24 01:25:59 crc kubenswrapper[4755]: I1124 01:25:59.989293 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-65p2h" Nov 24 01:26:00 crc kubenswrapper[4755]: I1124 01:26:00.655436 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-65p2h" Nov 24 01:26:00 crc kubenswrapper[4755]: I1124 01:26:00.763279 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-8gzgn" Nov 24 01:26:00 crc kubenswrapper[4755]: I1124 01:26:00.763369 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-8gzgn" Nov 24 01:26:00 crc kubenswrapper[4755]: I1124 01:26:00.813202 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-8gzgn" Nov 24 01:26:01 crc kubenswrapper[4755]: I1124 01:26:01.659913 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-8gzgn" Nov 24 01:26:02 crc kubenswrapper[4755]: I1124 01:26:02.027307 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mf98s"] Nov 24 01:26:02 crc kubenswrapper[4755]: I1124 01:26:02.027561 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-mf98s" podUID="81c4cadf-6ecf-43b3-88d2-9194129a34e9" containerName="registry-server" containerID="cri-o://7ef7bf1467618d4ac800ee9080a9123a07ecdc8550c716cbcae3a6fdadfd2203" gracePeriod=2 Nov 24 01:26:02 crc kubenswrapper[4755]: I1124 01:26:02.622459 4755 generic.go:334] "Generic (PLEG): container finished" podID="81c4cadf-6ecf-43b3-88d2-9194129a34e9" containerID="7ef7bf1467618d4ac800ee9080a9123a07ecdc8550c716cbcae3a6fdadfd2203" exitCode=0 Nov 24 01:26:02 crc kubenswrapper[4755]: I1124 01:26:02.622507 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mf98s" event={"ID":"81c4cadf-6ecf-43b3-88d2-9194129a34e9","Type":"ContainerDied","Data":"7ef7bf1467618d4ac800ee9080a9123a07ecdc8550c716cbcae3a6fdadfd2203"} Nov 24 01:26:03 crc kubenswrapper[4755]: I1124 01:26:03.427804 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-65p2h"] Nov 24 01:26:03 crc kubenswrapper[4755]: I1124 01:26:03.428351 4755 kuberuntime_container.go:808] "Killing container 
with a grace period" pod="openshift-marketplace/community-operators-65p2h" podUID="7de87f2f-d51b-476a-a3e2-570b6dadedb6" containerName="registry-server" containerID="cri-o://add2d86a3d812332389d282c7ee971e2ad95f7fb2d86162818113253dde5c9dc" gracePeriod=2 Nov 24 01:26:03 crc kubenswrapper[4755]: I1124 01:26:03.632711 4755 generic.go:334] "Generic (PLEG): container finished" podID="7de87f2f-d51b-476a-a3e2-570b6dadedb6" containerID="add2d86a3d812332389d282c7ee971e2ad95f7fb2d86162818113253dde5c9dc" exitCode=0 Nov 24 01:26:03 crc kubenswrapper[4755]: I1124 01:26:03.632762 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-65p2h" event={"ID":"7de87f2f-d51b-476a-a3e2-570b6dadedb6","Type":"ContainerDied","Data":"add2d86a3d812332389d282c7ee971e2ad95f7fb2d86162818113253dde5c9dc"} Nov 24 01:26:04 crc kubenswrapper[4755]: I1124 01:26:04.638795 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-68786bb554-2pljv" Nov 24 01:26:05 crc kubenswrapper[4755]: I1124 01:26:05.625536 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8gzgn"] Nov 24 01:26:05 crc kubenswrapper[4755]: I1124 01:26:05.625759 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-8gzgn" podUID="e8c1a228-1510-48be-bb0a-3d5dea46aa5e" containerName="registry-server" containerID="cri-o://0dc24d152e5753121cce9750edfa1029f4a4777ddffcb153fd6fc744e4334d33" gracePeriod=2 Nov 24 01:26:06 crc kubenswrapper[4755]: I1124 01:26:06.658148 4755 generic.go:334] "Generic (PLEG): container finished" podID="e8c1a228-1510-48be-bb0a-3d5dea46aa5e" containerID="0dc24d152e5753121cce9750edfa1029f4a4777ddffcb153fd6fc744e4334d33" exitCode=0 Nov 24 01:26:06 crc kubenswrapper[4755]: I1124 01:26:06.658541 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8gzgn" event={"ID":"e8c1a228-1510-48be-bb0a-3d5dea46aa5e","Type":"ContainerDied","Data":"0dc24d152e5753121cce9750edfa1029f4a4777ddffcb153fd6fc744e4334d33"} Nov 24 01:26:08 crc kubenswrapper[4755]: E1124 01:26:08.460304 4755 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 7ef7bf1467618d4ac800ee9080a9123a07ecdc8550c716cbcae3a6fdadfd2203 is running failed: container process not found" containerID="7ef7bf1467618d4ac800ee9080a9123a07ecdc8550c716cbcae3a6fdadfd2203" cmd=["grpc_health_probe","-addr=:50051"] Nov 24 01:26:08 crc kubenswrapper[4755]: E1124 01:26:08.461510 4755 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 7ef7bf1467618d4ac800ee9080a9123a07ecdc8550c716cbcae3a6fdadfd2203 is running failed: container process not found" containerID="7ef7bf1467618d4ac800ee9080a9123a07ecdc8550c716cbcae3a6fdadfd2203" cmd=["grpc_health_probe","-addr=:50051"] Nov 24 01:26:08 crc kubenswrapper[4755]: E1124 01:26:08.461800 4755 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 7ef7bf1467618d4ac800ee9080a9123a07ecdc8550c716cbcae3a6fdadfd2203 is running failed: container process not found" containerID="7ef7bf1467618d4ac800ee9080a9123a07ecdc8550c716cbcae3a6fdadfd2203" cmd=["grpc_health_probe","-addr=:50051"] Nov 24 01:26:08 crc 
kubenswrapper[4755]: E1124 01:26:08.461824 4755 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 7ef7bf1467618d4ac800ee9080a9123a07ecdc8550c716cbcae3a6fdadfd2203 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-operators-mf98s" podUID="81c4cadf-6ecf-43b3-88d2-9194129a34e9" containerName="registry-server" Nov 24 01:26:09 crc kubenswrapper[4755]: E1124 01:26:09.953271 4755 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of add2d86a3d812332389d282c7ee971e2ad95f7fb2d86162818113253dde5c9dc is running failed: container process not found" containerID="add2d86a3d812332389d282c7ee971e2ad95f7fb2d86162818113253dde5c9dc" cmd=["grpc_health_probe","-addr=:50051"] Nov 24 01:26:09 crc kubenswrapper[4755]: E1124 01:26:09.953762 4755 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of add2d86a3d812332389d282c7ee971e2ad95f7fb2d86162818113253dde5c9dc is running failed: container process not found" containerID="add2d86a3d812332389d282c7ee971e2ad95f7fb2d86162818113253dde5c9dc" cmd=["grpc_health_probe","-addr=:50051"] Nov 24 01:26:09 crc kubenswrapper[4755]: E1124 01:26:09.954185 4755 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of add2d86a3d812332389d282c7ee971e2ad95f7fb2d86162818113253dde5c9dc is running failed: container process not found" containerID="add2d86a3d812332389d282c7ee971e2ad95f7fb2d86162818113253dde5c9dc" cmd=["grpc_health_probe","-addr=:50051"] Nov 24 01:26:09 crc kubenswrapper[4755]: E1124 01:26:09.954511 4755 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of add2d86a3d812332389d282c7ee971e2ad95f7fb2d86162818113253dde5c9dc is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/community-operators-65p2h" podUID="7de87f2f-d51b-476a-a3e2-570b6dadedb6" containerName="registry-server" Nov 24 01:26:10 crc kubenswrapper[4755]: E1124 01:26:10.763720 4755 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 0dc24d152e5753121cce9750edfa1029f4a4777ddffcb153fd6fc744e4334d33 is running failed: container process not found" containerID="0dc24d152e5753121cce9750edfa1029f4a4777ddffcb153fd6fc744e4334d33" cmd=["grpc_health_probe","-addr=:50051"] Nov 24 01:26:10 crc kubenswrapper[4755]: E1124 01:26:10.764645 4755 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 0dc24d152e5753121cce9750edfa1029f4a4777ddffcb153fd6fc744e4334d33 is running failed: container process not found" containerID="0dc24d152e5753121cce9750edfa1029f4a4777ddffcb153fd6fc744e4334d33" cmd=["grpc_health_probe","-addr=:50051"] Nov 24 01:26:10 crc kubenswrapper[4755]: E1124 01:26:10.765216 4755 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 0dc24d152e5753121cce9750edfa1029f4a4777ddffcb153fd6fc744e4334d33 is running failed: container process not found" 
containerID="0dc24d152e5753121cce9750edfa1029f4a4777ddffcb153fd6fc744e4334d33" cmd=["grpc_health_probe","-addr=:50051"] Nov 24 01:26:10 crc kubenswrapper[4755]: E1124 01:26:10.765288 4755 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 0dc24d152e5753121cce9750edfa1029f4a4777ddffcb153fd6fc744e4334d33 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-8gzgn" podUID="e8c1a228-1510-48be-bb0a-3d5dea46aa5e" containerName="registry-server" Nov 24 01:26:11 crc kubenswrapper[4755]: E1124 01:26:11.268664 4755 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/test-operator@sha256:82207e753574d4be246f86c4b074500d66cf20214aa80f0a8525cf3287a35e6d" Nov 24 01:26:11 crc kubenswrapper[4755]: E1124 01:26:11.268926 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:82207e753574d4be246f86c4b074500d66cf20214aa80f0a8525cf3287a35e6d,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-dct8w,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-b4c496f69-n9dqh_openstack-operators(8914c196-79e5-456c-9a42-1f4464f8dbf8): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 24 01:26:11 crc kubenswrapper[4755]: E1124 01:26:11.775035 4755 log.go:32] "PullImage from image service failed" 
err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/manila-operator@sha256:b749a5dd8bc718875c3f5e81b38d54d003be77ab92de4a3e9f9595566496a58a" Nov 24 01:26:11 crc kubenswrapper[4755]: E1124 01:26:11.775238 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/manila-operator@sha256:b749a5dd8bc718875c3f5e81b38d54d003be77ab92de4a3e9f9595566496a58a,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-kkw4t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-58f887965d-2rnh9_openstack-operators(33d6bfe7-943b-4a59-bfdd-e240b869163d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 24 01:26:12 crc kubenswrapper[4755]: E1124 01:26:12.228341 4755 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/barbican-operator@sha256:70cce55bcf89468c5d468ca2fc317bfc3dc5f2bef1c502df9faca2eb1293ede7" Nov 24 01:26:12 crc kubenswrapper[4755]: E1124 01:26:12.228798 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/barbican-operator@sha256:70cce55bcf89468c5d468ca2fc317bfc3dc5f2bef1c502df9faca2eb1293ede7,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 
--leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-qrdpk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-operator-controller-manager-75fb479bcc-b297l_openstack-operators(1660a0eb-228b-41bc-a360-a71fec20d415): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 24 01:26:12 crc kubenswrapper[4755]: I1124 01:26:12.275113 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mf98s" Nov 24 01:26:12 crc kubenswrapper[4755]: I1124 01:26:12.389893 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mj6f8\" (UniqueName: \"kubernetes.io/projected/81c4cadf-6ecf-43b3-88d2-9194129a34e9-kube-api-access-mj6f8\") pod \"81c4cadf-6ecf-43b3-88d2-9194129a34e9\" (UID: \"81c4cadf-6ecf-43b3-88d2-9194129a34e9\") " Nov 24 01:26:12 crc kubenswrapper[4755]: I1124 01:26:12.389996 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81c4cadf-6ecf-43b3-88d2-9194129a34e9-catalog-content\") pod \"81c4cadf-6ecf-43b3-88d2-9194129a34e9\" (UID: \"81c4cadf-6ecf-43b3-88d2-9194129a34e9\") " Nov 24 01:26:12 crc kubenswrapper[4755]: I1124 01:26:12.390084 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81c4cadf-6ecf-43b3-88d2-9194129a34e9-utilities\") pod \"81c4cadf-6ecf-43b3-88d2-9194129a34e9\" (UID: \"81c4cadf-6ecf-43b3-88d2-9194129a34e9\") " Nov 24 01:26:12 crc kubenswrapper[4755]: I1124 01:26:12.391112 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81c4cadf-6ecf-43b3-88d2-9194129a34e9-utilities" (OuterVolumeSpecName: "utilities") pod "81c4cadf-6ecf-43b3-88d2-9194129a34e9" (UID: "81c4cadf-6ecf-43b3-88d2-9194129a34e9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:26:12 crc kubenswrapper[4755]: I1124 01:26:12.397404 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81c4cadf-6ecf-43b3-88d2-9194129a34e9-kube-api-access-mj6f8" (OuterVolumeSpecName: "kube-api-access-mj6f8") pod "81c4cadf-6ecf-43b3-88d2-9194129a34e9" (UID: "81c4cadf-6ecf-43b3-88d2-9194129a34e9"). InnerVolumeSpecName "kube-api-access-mj6f8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:26:12 crc kubenswrapper[4755]: E1124 01:26:12.471534 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/manila-operator-controller-manager-58f887965d-2rnh9" podUID="33d6bfe7-943b-4a59-bfdd-e240b869163d" Nov 24 01:26:12 crc kubenswrapper[4755]: I1124 01:26:12.498706 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81c4cadf-6ecf-43b3-88d2-9194129a34e9-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 01:26:12 crc kubenswrapper[4755]: I1124 01:26:12.498733 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mj6f8\" (UniqueName: \"kubernetes.io/projected/81c4cadf-6ecf-43b3-88d2-9194129a34e9-kube-api-access-mj6f8\") on node \"crc\" DevicePath \"\"" Nov 24 01:26:12 crc kubenswrapper[4755]: I1124 01:26:12.514562 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81c4cadf-6ecf-43b3-88d2-9194129a34e9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "81c4cadf-6ecf-43b3-88d2-9194129a34e9" (UID: "81c4cadf-6ecf-43b3-88d2-9194129a34e9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:26:12 crc kubenswrapper[4755]: I1124 01:26:12.572305 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-65p2h" Nov 24 01:26:12 crc kubenswrapper[4755]: E1124 01:26:12.595082 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/test-operator-controller-manager-b4c496f69-n9dqh" podUID="8914c196-79e5-456c-9a42-1f4464f8dbf8" Nov 24 01:26:12 crc kubenswrapper[4755]: I1124 01:26:12.599551 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81c4cadf-6ecf-43b3-88d2-9194129a34e9-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 01:26:12 crc kubenswrapper[4755]: E1124 01:26:12.606088 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/barbican-operator-controller-manager-75fb479bcc-b297l" podUID="1660a0eb-228b-41bc-a360-a71fec20d415" Nov 24 01:26:12 crc kubenswrapper[4755]: I1124 01:26:12.699969 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9c8vt\" (UniqueName: \"kubernetes.io/projected/7de87f2f-d51b-476a-a3e2-570b6dadedb6-kube-api-access-9c8vt\") pod \"7de87f2f-d51b-476a-a3e2-570b6dadedb6\" (UID: \"7de87f2f-d51b-476a-a3e2-570b6dadedb6\") " Nov 24 01:26:12 crc kubenswrapper[4755]: I1124 01:26:12.700068 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7de87f2f-d51b-476a-a3e2-570b6dadedb6-utilities\") pod \"7de87f2f-d51b-476a-a3e2-570b6dadedb6\" (UID: \"7de87f2f-d51b-476a-a3e2-570b6dadedb6\") " Nov 24 01:26:12 crc kubenswrapper[4755]: I1124 01:26:12.700133 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7de87f2f-d51b-476a-a3e2-570b6dadedb6-catalog-content\") pod \"7de87f2f-d51b-476a-a3e2-570b6dadedb6\" (UID: \"7de87f2f-d51b-476a-a3e2-570b6dadedb6\") " Nov 24 01:26:12 crc kubenswrapper[4755]: I1124 01:26:12.701985 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7de87f2f-d51b-476a-a3e2-570b6dadedb6-utilities" (OuterVolumeSpecName: "utilities") pod "7de87f2f-d51b-476a-a3e2-570b6dadedb6" (UID: "7de87f2f-d51b-476a-a3e2-570b6dadedb6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:26:12 crc kubenswrapper[4755]: I1124 01:26:12.711434 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7de87f2f-d51b-476a-a3e2-570b6dadedb6-kube-api-access-9c8vt" (OuterVolumeSpecName: "kube-api-access-9c8vt") pod "7de87f2f-d51b-476a-a3e2-570b6dadedb6" (UID: "7de87f2f-d51b-476a-a3e2-570b6dadedb6"). InnerVolumeSpecName "kube-api-access-9c8vt". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:26:12 crc kubenswrapper[4755]: I1124 01:26:12.728249 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8gzgn" Nov 24 01:26:12 crc kubenswrapper[4755]: I1124 01:26:12.747140 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8gzgn" event={"ID":"e8c1a228-1510-48be-bb0a-3d5dea46aa5e","Type":"ContainerDied","Data":"62023b935f4c98473f5edf00ad364c808541d1b50956732b6d0387cfed3029f7"} Nov 24 01:26:12 crc kubenswrapper[4755]: I1124 01:26:12.747190 4755 scope.go:117] "RemoveContainer" containerID="0dc24d152e5753121cce9750edfa1029f4a4777ddffcb153fd6fc744e4334d33" Nov 24 01:26:12 crc kubenswrapper[4755]: I1124 01:26:12.753369 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-58f887965d-2rnh9" event={"ID":"33d6bfe7-943b-4a59-bfdd-e240b869163d","Type":"ContainerStarted","Data":"e298e2cd1b5a0ee859a9873c9e97b5da395c13b4197d80d7b7bf1064fe5f58fe"} Nov 24 01:26:12 crc kubenswrapper[4755]: I1124 01:26:12.755546 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7de87f2f-d51b-476a-a3e2-570b6dadedb6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7de87f2f-d51b-476a-a3e2-570b6dadedb6" (UID: "7de87f2f-d51b-476a-a3e2-570b6dadedb6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:26:12 crc kubenswrapper[4755]: I1124 01:26:12.781373 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-65p2h" event={"ID":"7de87f2f-d51b-476a-a3e2-570b6dadedb6","Type":"ContainerDied","Data":"c2384c8186af41fbbd6f6c22b23b45ca520acc269ba0a2ddb8cd4245e804ec90"} Nov 24 01:26:12 crc kubenswrapper[4755]: I1124 01:26:12.781485 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-65p2h" Nov 24 01:26:12 crc kubenswrapper[4755]: E1124 01:26:12.795753 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/manila-operator@sha256:b749a5dd8bc718875c3f5e81b38d54d003be77ab92de4a3e9f9595566496a58a\\\"\"" pod="openstack-operators/manila-operator-controller-manager-58f887965d-2rnh9" podUID="33d6bfe7-943b-4a59-bfdd-e240b869163d" Nov 24 01:26:12 crc kubenswrapper[4755]: I1124 01:26:12.798467 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7454b96578-5622b" event={"ID":"dd14c3fa-bb96-4795-b339-a506c71b16a2","Type":"ContainerStarted","Data":"67ac3a593c345eecb77ddaf6d167e5a04e859831e18602d963f1a61f2cba23aa"} Nov 24 01:26:12 crc kubenswrapper[4755]: I1124 01:26:12.798530 4755 scope.go:117] "RemoveContainer" containerID="ef9f046338e7c014242d0e23d17ed0b91c51cf306b7085502606d2ef7c362d11" Nov 24 01:26:12 crc kubenswrapper[4755]: I1124 01:26:12.801003 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g74qh\" (UniqueName: \"kubernetes.io/projected/e8c1a228-1510-48be-bb0a-3d5dea46aa5e-kube-api-access-g74qh\") pod \"e8c1a228-1510-48be-bb0a-3d5dea46aa5e\" (UID: \"e8c1a228-1510-48be-bb0a-3d5dea46aa5e\") " Nov 24 01:26:12 crc kubenswrapper[4755]: I1124 01:26:12.801080 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8c1a228-1510-48be-bb0a-3d5dea46aa5e-catalog-content\") pod \"e8c1a228-1510-48be-bb0a-3d5dea46aa5e\" (UID: \"e8c1a228-1510-48be-bb0a-3d5dea46aa5e\") " Nov 24 01:26:12 crc kubenswrapper[4755]: I1124 01:26:12.801140 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8c1a228-1510-48be-bb0a-3d5dea46aa5e-utilities\") pod \"e8c1a228-1510-48be-bb0a-3d5dea46aa5e\" (UID: \"e8c1a228-1510-48be-bb0a-3d5dea46aa5e\") " Nov 24 01:26:12 crc kubenswrapper[4755]: I1124 01:26:12.801434 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9c8vt\" (UniqueName: \"kubernetes.io/projected/7de87f2f-d51b-476a-a3e2-570b6dadedb6-kube-api-access-9c8vt\") on node \"crc\" DevicePath \"\"" Nov 24 01:26:12 crc kubenswrapper[4755]: I1124 01:26:12.801449 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7de87f2f-d51b-476a-a3e2-570b6dadedb6-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 01:26:12 crc kubenswrapper[4755]: I1124 01:26:12.801458 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7de87f2f-d51b-476a-a3e2-570b6dadedb6-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 01:26:12 crc kubenswrapper[4755]: I1124 01:26:12.803732 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e8c1a228-1510-48be-bb0a-3d5dea46aa5e-utilities" (OuterVolumeSpecName: "utilities") pod "e8c1a228-1510-48be-bb0a-3d5dea46aa5e" (UID: "e8c1a228-1510-48be-bb0a-3d5dea46aa5e"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:26:12 crc kubenswrapper[4755]: I1124 01:26:12.804897 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-56f54d6746-45h29" event={"ID":"22780566-edb3-47e3-b3ea-a42def0f4460","Type":"ContainerStarted","Data":"05a81b0bef85f001467d8c63908ad9900440cf89309fbcf0db26d853e922b3b9"} Nov 24 01:26:12 crc kubenswrapper[4755]: I1124 01:26:12.806184 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-75fb479bcc-b297l" event={"ID":"1660a0eb-228b-41bc-a360-a71fec20d415","Type":"ContainerStarted","Data":"694035d0a05fa4e0502944111c8d1be83ff0a1b45ed3bbd60f62217c6c4f7d81"} Nov 24 01:26:12 crc kubenswrapper[4755]: E1124 01:26:12.813947 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/barbican-operator@sha256:70cce55bcf89468c5d468ca2fc317bfc3dc5f2bef1c502df9faca2eb1293ede7\\\"\"" pod="openstack-operators/barbican-operator-controller-manager-75fb479bcc-b297l" podUID="1660a0eb-228b-41bc-a360-a71fec20d415" Nov 24 01:26:12 crc kubenswrapper[4755]: I1124 01:26:12.814372 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-b4c496f69-n9dqh" event={"ID":"8914c196-79e5-456c-9a42-1f4464f8dbf8","Type":"ContainerStarted","Data":"092b618439d4ffebc5882cceb82f13211546ab86be382164bffd857df7e295ba"} Nov 24 01:26:12 crc kubenswrapper[4755]: E1124 01:26:12.816020 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:82207e753574d4be246f86c4b074500d66cf20214aa80f0a8525cf3287a35e6d\\\"\"" pod="openstack-operators/test-operator-controller-manager-b4c496f69-n9dqh" podUID="8914c196-79e5-456c-9a42-1f4464f8dbf8" Nov 24 01:26:12 crc kubenswrapper[4755]: I1124 01:26:12.834326 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8c1a228-1510-48be-bb0a-3d5dea46aa5e-kube-api-access-g74qh" (OuterVolumeSpecName: "kube-api-access-g74qh") pod "e8c1a228-1510-48be-bb0a-3d5dea46aa5e" (UID: "e8c1a228-1510-48be-bb0a-3d5dea46aa5e"). InnerVolumeSpecName "kube-api-access-g74qh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:26:12 crc kubenswrapper[4755]: I1124 01:26:12.842640 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-7969689c84-bnmvb" event={"ID":"a22ef49d-f887-41f4-ad37-6b1b0bf7a748","Type":"ContainerStarted","Data":"593a1a72cc43c3a2383881b21223ed7203526bcee00e8aedc675c9a07f9be300"} Nov 24 01:26:12 crc kubenswrapper[4755]: I1124 01:26:12.871122 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mf98s" event={"ID":"81c4cadf-6ecf-43b3-88d2-9194129a34e9","Type":"ContainerDied","Data":"5fa9314e43c9b3b7566c27c8ef96e5316919913ac1729ed46b0feba3cc76eb47"} Nov 24 01:26:12 crc kubenswrapper[4755]: I1124 01:26:12.871230 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mf98s" Nov 24 01:26:12 crc kubenswrapper[4755]: I1124 01:26:12.875663 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-65p2h"] Nov 24 01:26:12 crc kubenswrapper[4755]: I1124 01:26:12.902383 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g74qh\" (UniqueName: \"kubernetes.io/projected/e8c1a228-1510-48be-bb0a-3d5dea46aa5e-kube-api-access-g74qh\") on node \"crc\" DevicePath \"\"" Nov 24 01:26:12 crc kubenswrapper[4755]: I1124 01:26:12.902411 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8c1a228-1510-48be-bb0a-3d5dea46aa5e-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 01:26:12 crc kubenswrapper[4755]: I1124 01:26:12.904060 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-65p2h"] Nov 24 01:26:12 crc kubenswrapper[4755]: I1124 01:26:12.918595 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e8c1a228-1510-48be-bb0a-3d5dea46aa5e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e8c1a228-1510-48be-bb0a-3d5dea46aa5e" (UID: "e8c1a228-1510-48be-bb0a-3d5dea46aa5e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:26:12 crc kubenswrapper[4755]: I1124 01:26:12.947428 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mf98s"] Nov 24 01:26:12 crc kubenswrapper[4755]: I1124 01:26:12.954665 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-mf98s"] Nov 24 01:26:12 crc kubenswrapper[4755]: I1124 01:26:12.956145 4755 scope.go:117] "RemoveContainer" containerID="bd960007a557a65cc12ea4a1b35d29fca7f6c1306032090fd7f6b44448828d0c" Nov 24 01:26:13 crc kubenswrapper[4755]: I1124 01:26:13.007495 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8c1a228-1510-48be-bb0a-3d5dea46aa5e-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 01:26:13 crc kubenswrapper[4755]: I1124 01:26:13.052513 4755 scope.go:117] "RemoveContainer" containerID="add2d86a3d812332389d282c7ee971e2ad95f7fb2d86162818113253dde5c9dc" Nov 24 01:26:13 crc kubenswrapper[4755]: I1124 01:26:13.905994 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-54cfbf4c7d-7t6ft" event={"ID":"3c770fe2-ea89-4ba8-b4f0-95a4f310ea65","Type":"ContainerStarted","Data":"8f1bcba7e7b26484164853454dd10dd139219926b72f9776bcead92d758b2305"} Nov 24 01:26:13 crc kubenswrapper[4755]: I1124 01:26:13.912830 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-8c7444f48-rbmhv" event={"ID":"f013c70d-8c89-40f5-a132-393403d297c2","Type":"ContainerStarted","Data":"5e3aa264480948b51eadffb21492db2db60efb12ce5c4c5f13d4f6d66f2bb431"} Nov 24 01:26:13 crc kubenswrapper[4755]: I1124 01:26:13.915250 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-78bd47f458-pcp8q" event={"ID":"9e78d1d5-6ae4-4fc3-9edf-77e9f331bf19","Type":"ContainerStarted","Data":"e1b7d7dc66c3dd1e9f10218fac4a7ed99fafbd9023778e04e5bf00b78d9ddc39"} Nov 24 01:26:13 crc kubenswrapper[4755]: I1124 01:26:13.921489 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/horizon-operator-controller-manager-598f69df5d-97znt" event={"ID":"77153df1-136d-456e-a6e0-817b2f633d3e","Type":"ContainerStarted","Data":"7c8fa594f83dbf830ff47da8df0dac61111949025c35f018d823fb761aa76f30"} Nov 24 01:26:13 crc kubenswrapper[4755]: I1124 01:26:13.922556 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8gzgn" Nov 24 01:26:13 crc kubenswrapper[4755]: I1124 01:26:13.925549 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-54b5986bb8-gqlfb" event={"ID":"1a7c3ac2-1c0f-474e-837c-b80226975978","Type":"ContainerStarted","Data":"2fda333edf39a90b852415c7be3943fe0ec20491935d088f154a73e789f9fcc9"} Nov 24 01:26:13 crc kubenswrapper[4755]: I1124 01:26:13.926787 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-54fc5f65b7-b94wx" event={"ID":"73185acc-71f3-452e-8454-ebad97b6c6ad","Type":"ContainerStarted","Data":"c8ca296f4283a7ce5c0a6d6d5aa0176eb890796e02f4ebc8b81145173f531fb6"} Nov 24 01:26:13 crc kubenswrapper[4755]: I1124 01:26:13.929062 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-6498cbf48f-hh7kh" event={"ID":"fc04cdee-f1bd-4d40-9c1c-02f4e9661851","Type":"ContainerStarted","Data":"8b1251462426123d2de7eec79318351ad8bfca30c10055a487b605b519cd8338"} Nov 24 01:26:13 crc kubenswrapper[4755]: I1124 01:26:13.933850 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-99b499f4-p9wgw" event={"ID":"f443bd2d-3e36-44eb-9684-8ec505b8bea7","Type":"ContainerStarted","Data":"ad4be6815143a40f5e43d81559540ccaa5943ac43e84097515fa847d9deb8592"} Nov 24 01:26:13 crc kubenswrapper[4755]: I1124 01:26:13.936411 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-767ccfd65f-fdwk5" event={"ID":"e84b7100-14c9-436c-97e5-d14c2455b42a","Type":"ContainerStarted","Data":"f8d47a757c5f4554bbd902745ea10c036fc03ca9c282a9d62acda154b761ced9"} Nov 24 01:26:13 crc kubenswrapper[4755]: E1124 01:26:13.938414 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/barbican-operator@sha256:70cce55bcf89468c5d468ca2fc317bfc3dc5f2bef1c502df9faca2eb1293ede7\\\"\"" pod="openstack-operators/barbican-operator-controller-manager-75fb479bcc-b297l" podUID="1660a0eb-228b-41bc-a360-a71fec20d415" Nov 24 01:26:13 crc kubenswrapper[4755]: E1124 01:26:13.938983 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:82207e753574d4be246f86c4b074500d66cf20214aa80f0a8525cf3287a35e6d\\\"\"" pod="openstack-operators/test-operator-controller-manager-b4c496f69-n9dqh" podUID="8914c196-79e5-456c-9a42-1f4464f8dbf8" Nov 24 01:26:13 crc kubenswrapper[4755]: E1124 01:26:13.939391 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/manila-operator@sha256:b749a5dd8bc718875c3f5e81b38d54d003be77ab92de4a3e9f9595566496a58a\\\"\"" pod="openstack-operators/manila-operator-controller-manager-58f887965d-2rnh9" 
podUID="33d6bfe7-943b-4a59-bfdd-e240b869163d" Nov 24 01:26:13 crc kubenswrapper[4755]: I1124 01:26:13.947483 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8gzgn"] Nov 24 01:26:13 crc kubenswrapper[4755]: I1124 01:26:13.959912 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-8gzgn"] Nov 24 01:26:14 crc kubenswrapper[4755]: I1124 01:26:14.054946 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7de87f2f-d51b-476a-a3e2-570b6dadedb6" path="/var/lib/kubelet/pods/7de87f2f-d51b-476a-a3e2-570b6dadedb6/volumes" Nov 24 01:26:14 crc kubenswrapper[4755]: I1124 01:26:14.059589 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81c4cadf-6ecf-43b3-88d2-9194129a34e9" path="/var/lib/kubelet/pods/81c4cadf-6ecf-43b3-88d2-9194129a34e9/volumes" Nov 24 01:26:14 crc kubenswrapper[4755]: I1124 01:26:14.060219 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e8c1a228-1510-48be-bb0a-3d5dea46aa5e" path="/var/lib/kubelet/pods/e8c1a228-1510-48be-bb0a-3d5dea46aa5e/volumes" Nov 24 01:26:14 crc kubenswrapper[4755]: I1124 01:26:14.944099 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-56f54d6746-45h29" event={"ID":"22780566-edb3-47e3-b3ea-a42def0f4460","Type":"ContainerStarted","Data":"1682a4848f65880c16101b5806afc0a18e976da74770ca10becd9f82a37fc035"} Nov 24 01:26:14 crc kubenswrapper[4755]: I1124 01:26:14.944450 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-56f54d6746-45h29" Nov 24 01:26:14 crc kubenswrapper[4755]: I1124 01:26:14.946224 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-7969689c84-bnmvb" event={"ID":"a22ef49d-f887-41f4-ad37-6b1b0bf7a748","Type":"ContainerStarted","Data":"541ba2f680428dc4ca23f1b345993adcf03c202c797459b28651525b6176b350"} Nov 24 01:26:14 crc kubenswrapper[4755]: I1124 01:26:14.946301 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-7969689c84-bnmvb" Nov 24 01:26:14 crc kubenswrapper[4755]: I1124 01:26:14.948538 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-6d4bf84b58-7pmzf" event={"ID":"25b69b88-4612-4183-a978-b9dd58502d37","Type":"ContainerStarted","Data":"aa77360dde61a7dd237b96d986267181dfda1f2a6c7ff7dc21a8a2403de5d9cb"} Nov 24 01:26:14 crc kubenswrapper[4755]: I1124 01:26:14.964574 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-56f54d6746-45h29" podStartSLOduration=5.13097239 podStartE2EDuration="22.964554086s" podCreationTimestamp="2025-11-24 01:25:52 +0000 UTC" firstStartedPulling="2025-11-24 01:25:54.480924148 +0000 UTC m=+779.166989669" lastFinishedPulling="2025-11-24 01:26:12.314505864 +0000 UTC m=+797.000571365" observedRunningTime="2025-11-24 01:26:14.958069266 +0000 UTC m=+799.644134757" watchObservedRunningTime="2025-11-24 01:26:14.964554086 +0000 UTC m=+799.650619587" Nov 24 01:26:14 crc kubenswrapper[4755]: I1124 01:26:14.996273 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-7969689c84-bnmvb" podStartSLOduration=5.4020403980000005 podStartE2EDuration="22.996258606s" 
podCreationTimestamp="2025-11-24 01:25:52 +0000 UTC" firstStartedPulling="2025-11-24 01:25:54.721378875 +0000 UTC m=+779.407444376" lastFinishedPulling="2025-11-24 01:26:12.315597083 +0000 UTC m=+797.001662584" observedRunningTime="2025-11-24 01:26:14.992500708 +0000 UTC m=+799.678566199" watchObservedRunningTime="2025-11-24 01:26:14.996258606 +0000 UTC m=+799.682324107" Nov 24 01:26:15 crc kubenswrapper[4755]: I1124 01:26:15.867809 4755 scope.go:117] "RemoveContainer" containerID="09b0a5a54ba499ea1f7821a9bbe73678f47da834da761ec5b9fbae70778f73a6" Nov 24 01:26:16 crc kubenswrapper[4755]: I1124 01:26:16.972761 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7454b96578-5622b" event={"ID":"dd14c3fa-bb96-4795-b339-a506c71b16a2","Type":"ContainerStarted","Data":"24b10a5f9121f6c186184fef22b57456094c1532cd4ca0b1d2edd51b7ed80f86"} Nov 24 01:26:16 crc kubenswrapper[4755]: I1124 01:26:16.973093 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-7454b96578-5622b" Nov 24 01:26:16 crc kubenswrapper[4755]: I1124 01:26:16.992427 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-7454b96578-5622b" podStartSLOduration=7.157536809 podStartE2EDuration="24.992405599s" podCreationTimestamp="2025-11-24 01:25:52 +0000 UTC" firstStartedPulling="2025-11-24 01:25:54.480886417 +0000 UTC m=+779.166951918" lastFinishedPulling="2025-11-24 01:26:12.315755207 +0000 UTC m=+797.001820708" observedRunningTime="2025-11-24 01:26:16.989188705 +0000 UTC m=+801.675254216" watchObservedRunningTime="2025-11-24 01:26:16.992405599 +0000 UTC m=+801.678471100" Nov 24 01:26:17 crc kubenswrapper[4755]: I1124 01:26:17.979382 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-7454b96578-5622b" Nov 24 01:26:19 crc kubenswrapper[4755]: I1124 01:26:19.109987 4755 scope.go:117] "RemoveContainer" containerID="ecd29b8e139706b4c311bb86011176bdff6c350f9be2a4eacc47b743c903cc82" Nov 24 01:26:19 crc kubenswrapper[4755]: I1124 01:26:19.575297 4755 scope.go:117] "RemoveContainer" containerID="7ef7bf1467618d4ac800ee9080a9123a07ecdc8550c716cbcae3a6fdadfd2203" Nov 24 01:26:19 crc kubenswrapper[4755]: I1124 01:26:19.698947 4755 scope.go:117] "RemoveContainer" containerID="25ad85ae1511ef84637aca16ba383e90f37c3cd779f2b29cdd3a607fc4896553" Nov 24 01:26:19 crc kubenswrapper[4755]: I1124 01:26:19.785022 4755 scope.go:117] "RemoveContainer" containerID="a8c8dd811f7cb8151cdc29e9a5769d615f87c858007038c47699e53720a12c80" Nov 24 01:26:20 crc kubenswrapper[4755]: I1124 01:26:20.011735 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-78bd47f458-pcp8q" event={"ID":"9e78d1d5-6ae4-4fc3-9edf-77e9f331bf19","Type":"ContainerStarted","Data":"e923590e00ce81718f85e5ee2a122b6652d5fed7910225de41fb4aad2d8aa54e"} Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.019596 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-6498cbf48f-hh7kh" event={"ID":"fc04cdee-f1bd-4d40-9c1c-02f4e9661851","Type":"ContainerStarted","Data":"994f781691a117ac1a169c82377ba2341ff74c2594467b5f4036d454df40e295"} Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.021255 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack-operators/cinder-operator-controller-manager-6498cbf48f-hh7kh" Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.027822 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-6498cbf48f-hh7kh" Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.029012 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-54cfbf4c7d-7t6ft" event={"ID":"3c770fe2-ea89-4ba8-b4f0-95a4f310ea65","Type":"ContainerStarted","Data":"ce2301d1e4e7d0e0518c6184548104aa2944a1478fb59135bec87f28408adf62"} Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.030636 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-54cfbf4c7d-7t6ft" Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.031806 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-54cfbf4c7d-7t6ft" Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.040007 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-6dd8864d7c-zsbst" event={"ID":"fd91f6d6-1cc9-4350-a22a-b3859073f6e0","Type":"ContainerStarted","Data":"6c923093f58074480a6d46c9eedecaaf93a1a109dd3e1ed77946cebd99a7348a"} Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.041864 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-54b5986bb8-gqlfb" event={"ID":"1a7c3ac2-1c0f-474e-837c-b80226975978","Type":"ContainerStarted","Data":"9c81196e11ac919c1df7b2d70e9bc11ea6e5025ca5f98072e2fc4fc6995dc3d4"} Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.042424 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-6dd8864d7c-zsbst" Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.043350 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-6498cbf48f-hh7kh" podStartSLOduration=10.893849328 podStartE2EDuration="29.043339363s" podCreationTimestamp="2025-11-24 01:25:52 +0000 UTC" firstStartedPulling="2025-11-24 01:25:54.174864547 +0000 UTC m=+778.860930048" lastFinishedPulling="2025-11-24 01:26:12.324354582 +0000 UTC m=+797.010420083" observedRunningTime="2025-11-24 01:26:21.04285174 +0000 UTC m=+805.728917251" watchObservedRunningTime="2025-11-24 01:26:21.043339363 +0000 UTC m=+805.729404864" Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.044694 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-54b5986bb8-gqlfb" Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.045706 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-cfbb9c588-prcbz" event={"ID":"9b49aa48-d2db-40a6-9f6f-6b8e5bd3cd07","Type":"ContainerStarted","Data":"7cca9d426cdc52695c4273fbe1a5c59b588ccf936accad3046db906d56eadc9f"} Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.046514 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-cfbb9c588-prcbz" Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.049701 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/ovn-operator-controller-manager-54fc5f65b7-b94wx" event={"ID":"73185acc-71f3-452e-8454-ebad97b6c6ad","Type":"ContainerStarted","Data":"2c7b2b94a06358199eb693407345d5fde64bc6a6b69bea46bac65f1175ad4207"} Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.050380 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-54fc5f65b7-b94wx" Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.050439 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-54b5986bb8-gqlfb" Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.055177 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-54fc5f65b7-b94wx" Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.057307 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-d656998f4-kbj7b" event={"ID":"62e235a5-7928-4e26-9948-a3d2a829ef23","Type":"ContainerStarted","Data":"6237f9edf04a075ec3bd9f4d69c964ca64c2b309e308e9e1745a4577eb89624b"} Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.057750 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-d656998f4-kbj7b" Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.060252 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-8c7444f48-rbmhv" event={"ID":"f013c70d-8c89-40f5-a132-393403d297c2","Type":"ContainerStarted","Data":"b9616fcd592e022a5b6c73064ca884155110c19039219927e90bbcfd6ce7a86d"} Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.061357 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-8c7444f48-rbmhv" Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.063759 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-5b797b8dff-s22mt" event={"ID":"eb1590a5-3843-4540-ac41-bdfe49ae6569","Type":"ContainerStarted","Data":"f5fc1fd3cfc3a32b839cb9612dd8e41c19ab6138aa8c803390051edf0520000b"} Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.064158 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-5b797b8dff-s22mt" Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.065908 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-g2b7d" event={"ID":"2107694a-19fc-40cd-9ef2-b8b60b8b88e2","Type":"ContainerStarted","Data":"2342b592584627626ae6867a9a2366819442257ea08da33a6dee350da9da72eb"} Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.075056 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-767ccfd65f-fdwk5" event={"ID":"e84b7100-14c9-436c-97e5-d14c2455b42a","Type":"ContainerStarted","Data":"c08a3f0c495785dcceb8d09b6f036651a8b8febd467a7885c5fabfe77d4e1eb1"} Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.075329 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-8c7444f48-rbmhv" Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.075349 4755 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-767ccfd65f-fdwk5" Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.083529 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-767ccfd65f-fdwk5" Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.085314 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-6d4bf84b58-7pmzf" event={"ID":"25b69b88-4612-4183-a978-b9dd58502d37","Type":"ContainerStarted","Data":"6fd3e7ed6a4910c8a8933d874182e6fad9846d1c43ce611b803915bb784f0c57"} Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.086225 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-6d4bf84b58-7pmzf" Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.098319 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-6d4bf84b58-7pmzf" Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.099992 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-8c6448b9f-j56hj" event={"ID":"03ab2bfa-29d5-408b-8d69-54b8b367be23","Type":"ContainerStarted","Data":"e8c94567efe0dafa265f561bc7d0e45723dfa43f483e99ac7c81c01e0bba492a"} Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.100658 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-8c6448b9f-j56hj" Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.102122 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-598f69df5d-97znt" event={"ID":"77153df1-136d-456e-a6e0-817b2f633d3e","Type":"ContainerStarted","Data":"f7ef3fca9a77ec0b0816a68cdf12090b42120d3e2ec5ae307236fa9fafa64615"} Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.103079 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-598f69df5d-97znt" Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.104078 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-598f69df5d-97znt" Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.106551 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-99b499f4-p9wgw" event={"ID":"f443bd2d-3e36-44eb-9684-8ec505b8bea7","Type":"ContainerStarted","Data":"e2ffedb66011753965a33454da30c4b04932e142455b1c58f51c3afaa1bfa5b2"} Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.106624 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-78bd47f458-pcp8q" Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.106790 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-99b499f4-p9wgw" Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.109060 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-78bd47f458-pcp8q" Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.115431 4755 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-99b499f4-p9wgw" Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.115415 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-6dd8864d7c-zsbst" podStartSLOduration=5.265137785 podStartE2EDuration="29.11539326s" podCreationTimestamp="2025-11-24 01:25:52 +0000 UTC" firstStartedPulling="2025-11-24 01:25:55.259867892 +0000 UTC m=+779.945933393" lastFinishedPulling="2025-11-24 01:26:19.110123367 +0000 UTC m=+803.796188868" observedRunningTime="2025-11-24 01:26:21.098743894 +0000 UTC m=+805.784809405" watchObservedRunningTime="2025-11-24 01:26:21.11539326 +0000 UTC m=+805.801458761" Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.118448 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-54cfbf4c7d-7t6ft" podStartSLOduration=11.513874951 podStartE2EDuration="29.11843773s" podCreationTimestamp="2025-11-24 01:25:52 +0000 UTC" firstStartedPulling="2025-11-24 01:25:54.721001135 +0000 UTC m=+779.407066636" lastFinishedPulling="2025-11-24 01:26:12.325563914 +0000 UTC m=+797.011629415" observedRunningTime="2025-11-24 01:26:21.114095796 +0000 UTC m=+805.800161307" watchObservedRunningTime="2025-11-24 01:26:21.11843773 +0000 UTC m=+805.804503231" Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.141335 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-54b5986bb8-gqlfb" podStartSLOduration=11.119267222 podStartE2EDuration="29.141317169s" podCreationTimestamp="2025-11-24 01:25:52 +0000 UTC" firstStartedPulling="2025-11-24 01:25:54.302752637 +0000 UTC m=+778.988818138" lastFinishedPulling="2025-11-24 01:26:12.324802594 +0000 UTC m=+797.010868085" observedRunningTime="2025-11-24 01:26:21.134461429 +0000 UTC m=+805.820526930" watchObservedRunningTime="2025-11-24 01:26:21.141317169 +0000 UTC m=+805.827382670" Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.167944 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-g2b7d" podStartSLOduration=3.843913304 podStartE2EDuration="28.167910356s" podCreationTimestamp="2025-11-24 01:25:53 +0000 UTC" firstStartedPulling="2025-11-24 01:25:55.266130747 +0000 UTC m=+779.952196248" lastFinishedPulling="2025-11-24 01:26:19.590127799 +0000 UTC m=+804.276193300" observedRunningTime="2025-11-24 01:26:21.16539879 +0000 UTC m=+805.851464291" watchObservedRunningTime="2025-11-24 01:26:21.167910356 +0000 UTC m=+805.853975857" Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.186043 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-767ccfd65f-fdwk5" podStartSLOduration=10.887171589 podStartE2EDuration="29.18602569s" podCreationTimestamp="2025-11-24 01:25:52 +0000 UTC" firstStartedPulling="2025-11-24 01:25:54.010977711 +0000 UTC m=+778.697043202" lastFinishedPulling="2025-11-24 01:26:12.309831802 +0000 UTC m=+796.995897303" observedRunningTime="2025-11-24 01:26:21.183236377 +0000 UTC m=+805.869301878" watchObservedRunningTime="2025-11-24 01:26:21.18602569 +0000 UTC m=+805.872091191" Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.207462 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack-operators/neutron-operator-controller-manager-78bd47f458-pcp8q" podStartSLOduration=11.797453083 podStartE2EDuration="29.207446111s" podCreationTimestamp="2025-11-24 01:25:52 +0000 UTC" firstStartedPulling="2025-11-24 01:25:54.910641527 +0000 UTC m=+779.596707028" lastFinishedPulling="2025-11-24 01:26:12.320634555 +0000 UTC m=+797.006700056" observedRunningTime="2025-11-24 01:26:21.205794138 +0000 UTC m=+805.891859659" watchObservedRunningTime="2025-11-24 01:26:21.207446111 +0000 UTC m=+805.893511612" Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.240851 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-598f69df5d-97znt" podStartSLOduration=11.364445545 podStartE2EDuration="29.240833156s" podCreationTimestamp="2025-11-24 01:25:52 +0000 UTC" firstStartedPulling="2025-11-24 01:25:54.448858875 +0000 UTC m=+779.134924366" lastFinishedPulling="2025-11-24 01:26:12.325246476 +0000 UTC m=+797.011311977" observedRunningTime="2025-11-24 01:26:21.239942372 +0000 UTC m=+805.926007873" watchObservedRunningTime="2025-11-24 01:26:21.240833156 +0000 UTC m=+805.926898657" Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.242258 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-5b797b8dff-s22mt" podStartSLOduration=3.832381718 podStartE2EDuration="28.242251583s" podCreationTimestamp="2025-11-24 01:25:53 +0000 UTC" firstStartedPulling="2025-11-24 01:25:54.722588677 +0000 UTC m=+779.408654178" lastFinishedPulling="2025-11-24 01:26:19.132458542 +0000 UTC m=+803.818524043" observedRunningTime="2025-11-24 01:26:21.226284554 +0000 UTC m=+805.912350055" watchObservedRunningTime="2025-11-24 01:26:21.242251583 +0000 UTC m=+805.928317084" Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.261497 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-6d4bf84b58-7pmzf" podStartSLOduration=11.181347155 podStartE2EDuration="28.261480766s" podCreationTimestamp="2025-11-24 01:25:53 +0000 UTC" firstStartedPulling="2025-11-24 01:25:55.246117901 +0000 UTC m=+779.932183402" lastFinishedPulling="2025-11-24 01:26:12.326251512 +0000 UTC m=+797.012317013" observedRunningTime="2025-11-24 01:26:21.254839932 +0000 UTC m=+805.940905433" watchObservedRunningTime="2025-11-24 01:26:21.261480766 +0000 UTC m=+805.947546267" Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.286495 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-8c7444f48-rbmhv" podStartSLOduration=12.702719481 podStartE2EDuration="29.286473221s" podCreationTimestamp="2025-11-24 01:25:52 +0000 UTC" firstStartedPulling="2025-11-24 01:25:55.738000034 +0000 UTC m=+780.424065535" lastFinishedPulling="2025-11-24 01:26:12.321753774 +0000 UTC m=+797.007819275" observedRunningTime="2025-11-24 01:26:21.284559801 +0000 UTC m=+805.970625302" watchObservedRunningTime="2025-11-24 01:26:21.286473221 +0000 UTC m=+805.972538722" Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.304432 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-99b499f4-p9wgw" podStartSLOduration=11.659795909 podStartE2EDuration="29.304415061s" podCreationTimestamp="2025-11-24 01:25:52 +0000 UTC" firstStartedPulling="2025-11-24 01:25:54.676700231 
+0000 UTC m=+779.362765732" lastFinishedPulling="2025-11-24 01:26:12.321319383 +0000 UTC m=+797.007384884" observedRunningTime="2025-11-24 01:26:21.303259851 +0000 UTC m=+805.989325352" watchObservedRunningTime="2025-11-24 01:26:21.304415061 +0000 UTC m=+805.990480562" Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.324299 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-8c6448b9f-j56hj" podStartSLOduration=3.863922535 podStartE2EDuration="28.324283481s" podCreationTimestamp="2025-11-24 01:25:53 +0000 UTC" firstStartedPulling="2025-11-24 01:25:55.251925313 +0000 UTC m=+779.937990804" lastFinishedPulling="2025-11-24 01:26:19.712286249 +0000 UTC m=+804.398351750" observedRunningTime="2025-11-24 01:26:21.32079792 +0000 UTC m=+806.006863451" watchObservedRunningTime="2025-11-24 01:26:21.324283481 +0000 UTC m=+806.010348982" Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.363275 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-cfbb9c588-prcbz" podStartSLOduration=4.7723249899999995 podStartE2EDuration="29.363230591s" podCreationTimestamp="2025-11-24 01:25:52 +0000 UTC" firstStartedPulling="2025-11-24 01:25:54.918764371 +0000 UTC m=+779.604829872" lastFinishedPulling="2025-11-24 01:26:19.509669962 +0000 UTC m=+804.195735473" observedRunningTime="2025-11-24 01:26:21.361174728 +0000 UTC m=+806.047240229" watchObservedRunningTime="2025-11-24 01:26:21.363230591 +0000 UTC m=+806.049296092" Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.379509 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-d656998f4-kbj7b" podStartSLOduration=4.141636733 podStartE2EDuration="28.379491587s" podCreationTimestamp="2025-11-24 01:25:53 +0000 UTC" firstStartedPulling="2025-11-24 01:25:55.30203521 +0000 UTC m=+779.988100711" lastFinishedPulling="2025-11-24 01:26:19.539890064 +0000 UTC m=+804.225955565" observedRunningTime="2025-11-24 01:26:21.379464317 +0000 UTC m=+806.065529828" watchObservedRunningTime="2025-11-24 01:26:21.379491587 +0000 UTC m=+806.065557088" Nov 24 01:26:21 crc kubenswrapper[4755]: I1124 01:26:21.395549 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-54fc5f65b7-b94wx" podStartSLOduration=11.516270981 podStartE2EDuration="29.395527097s" podCreationTimestamp="2025-11-24 01:25:52 +0000 UTC" firstStartedPulling="2025-11-24 01:25:54.44407873 +0000 UTC m=+779.130144221" lastFinishedPulling="2025-11-24 01:26:12.323334836 +0000 UTC m=+797.009400337" observedRunningTime="2025-11-24 01:26:21.391665406 +0000 UTC m=+806.077730917" watchObservedRunningTime="2025-11-24 01:26:21.395527097 +0000 UTC m=+806.081592598" Nov 24 01:26:23 crc kubenswrapper[4755]: I1124 01:26:23.920948 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-56f54d6746-45h29" Nov 24 01:26:23 crc kubenswrapper[4755]: I1124 01:26:23.943833 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-7969689c84-bnmvb" Nov 24 01:26:29 crc kubenswrapper[4755]: I1124 01:26:29.169089 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-75fb479bcc-b297l" 
event={"ID":"1660a0eb-228b-41bc-a360-a71fec20d415","Type":"ContainerStarted","Data":"94962b7e5849d7d13cd7f6c7d6d97e550048fd748a0197c3c653e5531121d6d9"} Nov 24 01:26:29 crc kubenswrapper[4755]: I1124 01:26:29.169805 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-75fb479bcc-b297l" Nov 24 01:26:29 crc kubenswrapper[4755]: I1124 01:26:29.170718 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-58f887965d-2rnh9" event={"ID":"33d6bfe7-943b-4a59-bfdd-e240b869163d","Type":"ContainerStarted","Data":"5ec8b942eb6b05207a54f9674a62dddd2f09cb8082b3d987f3bda6caa5fcccc6"} Nov 24 01:26:29 crc kubenswrapper[4755]: I1124 01:26:29.170908 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-58f887965d-2rnh9" Nov 24 01:26:29 crc kubenswrapper[4755]: I1124 01:26:29.187361 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-75fb479bcc-b297l" podStartSLOduration=2.775816296 podStartE2EDuration="37.187341482s" podCreationTimestamp="2025-11-24 01:25:52 +0000 UTC" firstStartedPulling="2025-11-24 01:25:54.285696869 +0000 UTC m=+778.971762370" lastFinishedPulling="2025-11-24 01:26:28.697222045 +0000 UTC m=+813.383287556" observedRunningTime="2025-11-24 01:26:29.185814562 +0000 UTC m=+813.871880073" watchObservedRunningTime="2025-11-24 01:26:29.187341482 +0000 UTC m=+813.873406993" Nov 24 01:26:29 crc kubenswrapper[4755]: I1124 01:26:29.205668 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-58f887965d-2rnh9" podStartSLOduration=2.982444956 podStartE2EDuration="37.205646292s" podCreationTimestamp="2025-11-24 01:25:52 +0000 UTC" firstStartedPulling="2025-11-24 01:25:54.255869865 +0000 UTC m=+778.941935366" lastFinishedPulling="2025-11-24 01:26:28.479071201 +0000 UTC m=+813.165136702" observedRunningTime="2025-11-24 01:26:29.204262096 +0000 UTC m=+813.890327617" watchObservedRunningTime="2025-11-24 01:26:29.205646292 +0000 UTC m=+813.891711793" Nov 24 01:26:30 crc kubenswrapper[4755]: I1124 01:26:30.179911 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-b4c496f69-n9dqh" event={"ID":"8914c196-79e5-456c-9a42-1f4464f8dbf8","Type":"ContainerStarted","Data":"9b3f3b83774813a519058f4f741f132d7dfd6c7422df3c079534f1f7fb911055"} Nov 24 01:26:30 crc kubenswrapper[4755]: I1124 01:26:30.180481 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-b4c496f69-n9dqh" Nov 24 01:26:30 crc kubenswrapper[4755]: I1124 01:26:30.201959 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-b4c496f69-n9dqh" podStartSLOduration=3.020100372 podStartE2EDuration="37.201940906s" podCreationTimestamp="2025-11-24 01:25:53 +0000 UTC" firstStartedPulling="2025-11-24 01:25:55.228370075 +0000 UTC m=+779.914435576" lastFinishedPulling="2025-11-24 01:26:29.410210609 +0000 UTC m=+814.096276110" observedRunningTime="2025-11-24 01:26:30.198536327 +0000 UTC m=+814.884601838" watchObservedRunningTime="2025-11-24 01:26:30.201940906 +0000 UTC m=+814.888006397" Nov 24 01:26:33 crc kubenswrapper[4755]: I1124 01:26:33.004450 4755 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-75fb479bcc-b297l" Nov 24 01:26:33 crc kubenswrapper[4755]: I1124 01:26:33.342951 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-58f887965d-2rnh9" Nov 24 01:26:33 crc kubenswrapper[4755]: I1124 01:26:33.586640 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-cfbb9c588-prcbz" Nov 24 01:26:33 crc kubenswrapper[4755]: I1124 01:26:33.658453 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-5b797b8dff-s22mt" Nov 24 01:26:33 crc kubenswrapper[4755]: I1124 01:26:33.670257 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-d656998f4-kbj7b" Nov 24 01:26:33 crc kubenswrapper[4755]: I1124 01:26:33.797021 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-6dd8864d7c-zsbst" Nov 24 01:26:34 crc kubenswrapper[4755]: I1124 01:26:34.036523 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-8c6448b9f-j56hj" Nov 24 01:26:43 crc kubenswrapper[4755]: I1124 01:26:43.708031 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-b4c496f69-n9dqh" Nov 24 01:27:00 crc kubenswrapper[4755]: I1124 01:27:00.409835 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-nmftn"] Nov 24 01:27:00 crc kubenswrapper[4755]: E1124 01:27:00.412153 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81c4cadf-6ecf-43b3-88d2-9194129a34e9" containerName="extract-content" Nov 24 01:27:00 crc kubenswrapper[4755]: I1124 01:27:00.412203 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="81c4cadf-6ecf-43b3-88d2-9194129a34e9" containerName="extract-content" Nov 24 01:27:00 crc kubenswrapper[4755]: E1124 01:27:00.412236 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8c1a228-1510-48be-bb0a-3d5dea46aa5e" containerName="extract-content" Nov 24 01:27:00 crc kubenswrapper[4755]: I1124 01:27:00.412244 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8c1a228-1510-48be-bb0a-3d5dea46aa5e" containerName="extract-content" Nov 24 01:27:00 crc kubenswrapper[4755]: E1124 01:27:00.412290 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7de87f2f-d51b-476a-a3e2-570b6dadedb6" containerName="extract-content" Nov 24 01:27:00 crc kubenswrapper[4755]: I1124 01:27:00.412300 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="7de87f2f-d51b-476a-a3e2-570b6dadedb6" containerName="extract-content" Nov 24 01:27:00 crc kubenswrapper[4755]: E1124 01:27:00.412323 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7de87f2f-d51b-476a-a3e2-570b6dadedb6" containerName="extract-utilities" Nov 24 01:27:00 crc kubenswrapper[4755]: I1124 01:27:00.412331 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="7de87f2f-d51b-476a-a3e2-570b6dadedb6" containerName="extract-utilities" Nov 24 01:27:00 crc kubenswrapper[4755]: E1124 01:27:00.412383 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8c1a228-1510-48be-bb0a-3d5dea46aa5e" 
containerName="extract-utilities" Nov 24 01:27:00 crc kubenswrapper[4755]: I1124 01:27:00.412392 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8c1a228-1510-48be-bb0a-3d5dea46aa5e" containerName="extract-utilities" Nov 24 01:27:00 crc kubenswrapper[4755]: E1124 01:27:00.412410 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7de87f2f-d51b-476a-a3e2-570b6dadedb6" containerName="registry-server" Nov 24 01:27:00 crc kubenswrapper[4755]: I1124 01:27:00.412416 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="7de87f2f-d51b-476a-a3e2-570b6dadedb6" containerName="registry-server" Nov 24 01:27:00 crc kubenswrapper[4755]: E1124 01:27:00.412451 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81c4cadf-6ecf-43b3-88d2-9194129a34e9" containerName="registry-server" Nov 24 01:27:00 crc kubenswrapper[4755]: I1124 01:27:00.412459 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="81c4cadf-6ecf-43b3-88d2-9194129a34e9" containerName="registry-server" Nov 24 01:27:00 crc kubenswrapper[4755]: E1124 01:27:00.412473 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8c1a228-1510-48be-bb0a-3d5dea46aa5e" containerName="registry-server" Nov 24 01:27:00 crc kubenswrapper[4755]: I1124 01:27:00.412480 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8c1a228-1510-48be-bb0a-3d5dea46aa5e" containerName="registry-server" Nov 24 01:27:00 crc kubenswrapper[4755]: E1124 01:27:00.412491 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81c4cadf-6ecf-43b3-88d2-9194129a34e9" containerName="extract-utilities" Nov 24 01:27:00 crc kubenswrapper[4755]: I1124 01:27:00.412498 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="81c4cadf-6ecf-43b3-88d2-9194129a34e9" containerName="extract-utilities" Nov 24 01:27:00 crc kubenswrapper[4755]: I1124 01:27:00.412752 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="81c4cadf-6ecf-43b3-88d2-9194129a34e9" containerName="registry-server" Nov 24 01:27:00 crc kubenswrapper[4755]: I1124 01:27:00.412807 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="e8c1a228-1510-48be-bb0a-3d5dea46aa5e" containerName="registry-server" Nov 24 01:27:00 crc kubenswrapper[4755]: I1124 01:27:00.412825 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="7de87f2f-d51b-476a-a3e2-570b6dadedb6" containerName="registry-server" Nov 24 01:27:00 crc kubenswrapper[4755]: I1124 01:27:00.413969 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-nmftn" Nov 24 01:27:00 crc kubenswrapper[4755]: I1124 01:27:00.417964 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Nov 24 01:27:00 crc kubenswrapper[4755]: I1124 01:27:00.418153 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Nov 24 01:27:00 crc kubenswrapper[4755]: I1124 01:27:00.418322 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-k54fc" Nov 24 01:27:00 crc kubenswrapper[4755]: I1124 01:27:00.418433 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Nov 24 01:27:00 crc kubenswrapper[4755]: I1124 01:27:00.446440 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-nmftn"] Nov 24 01:27:00 crc kubenswrapper[4755]: I1124 01:27:00.471849 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-mn9d6"] Nov 24 01:27:00 crc kubenswrapper[4755]: I1124 01:27:00.473043 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-mn9d6" Nov 24 01:27:00 crc kubenswrapper[4755]: I1124 01:27:00.479215 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Nov 24 01:27:00 crc kubenswrapper[4755]: I1124 01:27:00.483992 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-mn9d6"] Nov 24 01:27:00 crc kubenswrapper[4755]: I1124 01:27:00.547319 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fpvd2\" (UniqueName: \"kubernetes.io/projected/61197ade-de23-4903-bb62-438e07080dbd-kube-api-access-fpvd2\") pod \"dnsmasq-dns-675f4bcbfc-nmftn\" (UID: \"61197ade-de23-4903-bb62-438e07080dbd\") " pod="openstack/dnsmasq-dns-675f4bcbfc-nmftn" Nov 24 01:27:00 crc kubenswrapper[4755]: I1124 01:27:00.547377 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a3ac5d9-abf5-430d-9d26-e6a308a6b1d7-config\") pod \"dnsmasq-dns-78dd6ddcc-mn9d6\" (UID: \"1a3ac5d9-abf5-430d-9d26-e6a308a6b1d7\") " pod="openstack/dnsmasq-dns-78dd6ddcc-mn9d6" Nov 24 01:27:00 crc kubenswrapper[4755]: I1124 01:27:00.547430 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7tvln\" (UniqueName: \"kubernetes.io/projected/1a3ac5d9-abf5-430d-9d26-e6a308a6b1d7-kube-api-access-7tvln\") pod \"dnsmasq-dns-78dd6ddcc-mn9d6\" (UID: \"1a3ac5d9-abf5-430d-9d26-e6a308a6b1d7\") " pod="openstack/dnsmasq-dns-78dd6ddcc-mn9d6" Nov 24 01:27:00 crc kubenswrapper[4755]: I1124 01:27:00.547481 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/61197ade-de23-4903-bb62-438e07080dbd-config\") pod \"dnsmasq-dns-675f4bcbfc-nmftn\" (UID: \"61197ade-de23-4903-bb62-438e07080dbd\") " pod="openstack/dnsmasq-dns-675f4bcbfc-nmftn" Nov 24 01:27:00 crc kubenswrapper[4755]: I1124 01:27:00.547508 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a3ac5d9-abf5-430d-9d26-e6a308a6b1d7-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-mn9d6\" (UID: \"1a3ac5d9-abf5-430d-9d26-e6a308a6b1d7\") " 
pod="openstack/dnsmasq-dns-78dd6ddcc-mn9d6" Nov 24 01:27:00 crc kubenswrapper[4755]: I1124 01:27:00.649251 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7tvln\" (UniqueName: \"kubernetes.io/projected/1a3ac5d9-abf5-430d-9d26-e6a308a6b1d7-kube-api-access-7tvln\") pod \"dnsmasq-dns-78dd6ddcc-mn9d6\" (UID: \"1a3ac5d9-abf5-430d-9d26-e6a308a6b1d7\") " pod="openstack/dnsmasq-dns-78dd6ddcc-mn9d6" Nov 24 01:27:00 crc kubenswrapper[4755]: I1124 01:27:00.649349 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a3ac5d9-abf5-430d-9d26-e6a308a6b1d7-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-mn9d6\" (UID: \"1a3ac5d9-abf5-430d-9d26-e6a308a6b1d7\") " pod="openstack/dnsmasq-dns-78dd6ddcc-mn9d6" Nov 24 01:27:00 crc kubenswrapper[4755]: I1124 01:27:00.649376 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/61197ade-de23-4903-bb62-438e07080dbd-config\") pod \"dnsmasq-dns-675f4bcbfc-nmftn\" (UID: \"61197ade-de23-4903-bb62-438e07080dbd\") " pod="openstack/dnsmasq-dns-675f4bcbfc-nmftn" Nov 24 01:27:00 crc kubenswrapper[4755]: I1124 01:27:00.649428 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fpvd2\" (UniqueName: \"kubernetes.io/projected/61197ade-de23-4903-bb62-438e07080dbd-kube-api-access-fpvd2\") pod \"dnsmasq-dns-675f4bcbfc-nmftn\" (UID: \"61197ade-de23-4903-bb62-438e07080dbd\") " pod="openstack/dnsmasq-dns-675f4bcbfc-nmftn" Nov 24 01:27:00 crc kubenswrapper[4755]: I1124 01:27:00.649469 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a3ac5d9-abf5-430d-9d26-e6a308a6b1d7-config\") pod \"dnsmasq-dns-78dd6ddcc-mn9d6\" (UID: \"1a3ac5d9-abf5-430d-9d26-e6a308a6b1d7\") " pod="openstack/dnsmasq-dns-78dd6ddcc-mn9d6" Nov 24 01:27:00 crc kubenswrapper[4755]: I1124 01:27:00.650223 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a3ac5d9-abf5-430d-9d26-e6a308a6b1d7-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-mn9d6\" (UID: \"1a3ac5d9-abf5-430d-9d26-e6a308a6b1d7\") " pod="openstack/dnsmasq-dns-78dd6ddcc-mn9d6" Nov 24 01:27:00 crc kubenswrapper[4755]: I1124 01:27:00.650353 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a3ac5d9-abf5-430d-9d26-e6a308a6b1d7-config\") pod \"dnsmasq-dns-78dd6ddcc-mn9d6\" (UID: \"1a3ac5d9-abf5-430d-9d26-e6a308a6b1d7\") " pod="openstack/dnsmasq-dns-78dd6ddcc-mn9d6" Nov 24 01:27:00 crc kubenswrapper[4755]: I1124 01:27:00.650409 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/61197ade-de23-4903-bb62-438e07080dbd-config\") pod \"dnsmasq-dns-675f4bcbfc-nmftn\" (UID: \"61197ade-de23-4903-bb62-438e07080dbd\") " pod="openstack/dnsmasq-dns-675f4bcbfc-nmftn" Nov 24 01:27:00 crc kubenswrapper[4755]: I1124 01:27:00.668431 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fpvd2\" (UniqueName: \"kubernetes.io/projected/61197ade-de23-4903-bb62-438e07080dbd-kube-api-access-fpvd2\") pod \"dnsmasq-dns-675f4bcbfc-nmftn\" (UID: \"61197ade-de23-4903-bb62-438e07080dbd\") " pod="openstack/dnsmasq-dns-675f4bcbfc-nmftn" Nov 24 01:27:00 crc kubenswrapper[4755]: I1124 01:27:00.668513 4755 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-7tvln\" (UniqueName: \"kubernetes.io/projected/1a3ac5d9-abf5-430d-9d26-e6a308a6b1d7-kube-api-access-7tvln\") pod \"dnsmasq-dns-78dd6ddcc-mn9d6\" (UID: \"1a3ac5d9-abf5-430d-9d26-e6a308a6b1d7\") " pod="openstack/dnsmasq-dns-78dd6ddcc-mn9d6" Nov 24 01:27:00 crc kubenswrapper[4755]: I1124 01:27:00.737055 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-nmftn" Nov 24 01:27:00 crc kubenswrapper[4755]: I1124 01:27:00.790571 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-mn9d6" Nov 24 01:27:01 crc kubenswrapper[4755]: I1124 01:27:01.209013 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-nmftn"] Nov 24 01:27:01 crc kubenswrapper[4755]: I1124 01:27:01.253296 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-mn9d6"] Nov 24 01:27:01 crc kubenswrapper[4755]: W1124 01:27:01.257466 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1a3ac5d9_abf5_430d_9d26_e6a308a6b1d7.slice/crio-7bfbeab98f928a767a345d593c8c600a813216347702672c5f988eb69eb74217 WatchSource:0}: Error finding container 7bfbeab98f928a767a345d593c8c600a813216347702672c5f988eb69eb74217: Status 404 returned error can't find the container with id 7bfbeab98f928a767a345d593c8c600a813216347702672c5f988eb69eb74217 Nov 24 01:27:01 crc kubenswrapper[4755]: I1124 01:27:01.452242 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-nmftn" event={"ID":"61197ade-de23-4903-bb62-438e07080dbd","Type":"ContainerStarted","Data":"44837af499a730391a424139e9a9648bbb7881a84136a366cbcefb9a1edfd4e1"} Nov 24 01:27:01 crc kubenswrapper[4755]: I1124 01:27:01.453357 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-mn9d6" event={"ID":"1a3ac5d9-abf5-430d-9d26-e6a308a6b1d7","Type":"ContainerStarted","Data":"7bfbeab98f928a767a345d593c8c600a813216347702672c5f988eb69eb74217"} Nov 24 01:27:03 crc kubenswrapper[4755]: I1124 01:27:03.372969 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-nmftn"] Nov 24 01:27:03 crc kubenswrapper[4755]: I1124 01:27:03.400266 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-rw7mx"] Nov 24 01:27:03 crc kubenswrapper[4755]: I1124 01:27:03.401908 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-rw7mx" Nov 24 01:27:03 crc kubenswrapper[4755]: I1124 01:27:03.415050 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-rw7mx"] Nov 24 01:27:03 crc kubenswrapper[4755]: I1124 01:27:03.518812 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a6a2b41a-a90f-4468-9df6-d81d680baa4b-dns-svc\") pod \"dnsmasq-dns-666b6646f7-rw7mx\" (UID: \"a6a2b41a-a90f-4468-9df6-d81d680baa4b\") " pod="openstack/dnsmasq-dns-666b6646f7-rw7mx" Nov 24 01:27:03 crc kubenswrapper[4755]: I1124 01:27:03.518911 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6a2b41a-a90f-4468-9df6-d81d680baa4b-config\") pod \"dnsmasq-dns-666b6646f7-rw7mx\" (UID: \"a6a2b41a-a90f-4468-9df6-d81d680baa4b\") " pod="openstack/dnsmasq-dns-666b6646f7-rw7mx" Nov 24 01:27:03 crc kubenswrapper[4755]: I1124 01:27:03.519038 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9tfbg\" (UniqueName: \"kubernetes.io/projected/a6a2b41a-a90f-4468-9df6-d81d680baa4b-kube-api-access-9tfbg\") pod \"dnsmasq-dns-666b6646f7-rw7mx\" (UID: \"a6a2b41a-a90f-4468-9df6-d81d680baa4b\") " pod="openstack/dnsmasq-dns-666b6646f7-rw7mx" Nov 24 01:27:03 crc kubenswrapper[4755]: I1124 01:27:03.619951 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a6a2b41a-a90f-4468-9df6-d81d680baa4b-dns-svc\") pod \"dnsmasq-dns-666b6646f7-rw7mx\" (UID: \"a6a2b41a-a90f-4468-9df6-d81d680baa4b\") " pod="openstack/dnsmasq-dns-666b6646f7-rw7mx" Nov 24 01:27:03 crc kubenswrapper[4755]: I1124 01:27:03.620054 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6a2b41a-a90f-4468-9df6-d81d680baa4b-config\") pod \"dnsmasq-dns-666b6646f7-rw7mx\" (UID: \"a6a2b41a-a90f-4468-9df6-d81d680baa4b\") " pod="openstack/dnsmasq-dns-666b6646f7-rw7mx" Nov 24 01:27:03 crc kubenswrapper[4755]: I1124 01:27:03.620086 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9tfbg\" (UniqueName: \"kubernetes.io/projected/a6a2b41a-a90f-4468-9df6-d81d680baa4b-kube-api-access-9tfbg\") pod \"dnsmasq-dns-666b6646f7-rw7mx\" (UID: \"a6a2b41a-a90f-4468-9df6-d81d680baa4b\") " pod="openstack/dnsmasq-dns-666b6646f7-rw7mx" Nov 24 01:27:03 crc kubenswrapper[4755]: I1124 01:27:03.620862 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a6a2b41a-a90f-4468-9df6-d81d680baa4b-dns-svc\") pod \"dnsmasq-dns-666b6646f7-rw7mx\" (UID: \"a6a2b41a-a90f-4468-9df6-d81d680baa4b\") " pod="openstack/dnsmasq-dns-666b6646f7-rw7mx" Nov 24 01:27:03 crc kubenswrapper[4755]: I1124 01:27:03.621602 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6a2b41a-a90f-4468-9df6-d81d680baa4b-config\") pod \"dnsmasq-dns-666b6646f7-rw7mx\" (UID: \"a6a2b41a-a90f-4468-9df6-d81d680baa4b\") " pod="openstack/dnsmasq-dns-666b6646f7-rw7mx" Nov 24 01:27:03 crc kubenswrapper[4755]: I1124 01:27:03.641706 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-mn9d6"] Nov 24 01:27:03 crc kubenswrapper[4755]: I1124 01:27:03.654478 
4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9tfbg\" (UniqueName: \"kubernetes.io/projected/a6a2b41a-a90f-4468-9df6-d81d680baa4b-kube-api-access-9tfbg\") pod \"dnsmasq-dns-666b6646f7-rw7mx\" (UID: \"a6a2b41a-a90f-4468-9df6-d81d680baa4b\") " pod="openstack/dnsmasq-dns-666b6646f7-rw7mx" Nov 24 01:27:03 crc kubenswrapper[4755]: I1124 01:27:03.671848 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-lh52s"] Nov 24 01:27:03 crc kubenswrapper[4755]: I1124 01:27:03.672929 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-lh52s" Nov 24 01:27:03 crc kubenswrapper[4755]: I1124 01:27:03.694683 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-lh52s"] Nov 24 01:27:03 crc kubenswrapper[4755]: I1124 01:27:03.718375 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-rw7mx" Nov 24 01:27:03 crc kubenswrapper[4755]: I1124 01:27:03.823603 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88fbee01-1663-4ae9-9776-84ad70bfc066-config\") pod \"dnsmasq-dns-57d769cc4f-lh52s\" (UID: \"88fbee01-1663-4ae9-9776-84ad70bfc066\") " pod="openstack/dnsmasq-dns-57d769cc4f-lh52s" Nov 24 01:27:03 crc kubenswrapper[4755]: I1124 01:27:03.824248 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/88fbee01-1663-4ae9-9776-84ad70bfc066-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-lh52s\" (UID: \"88fbee01-1663-4ae9-9776-84ad70bfc066\") " pod="openstack/dnsmasq-dns-57d769cc4f-lh52s" Nov 24 01:27:03 crc kubenswrapper[4755]: I1124 01:27:03.824285 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jx8pm\" (UniqueName: \"kubernetes.io/projected/88fbee01-1663-4ae9-9776-84ad70bfc066-kube-api-access-jx8pm\") pod \"dnsmasq-dns-57d769cc4f-lh52s\" (UID: \"88fbee01-1663-4ae9-9776-84ad70bfc066\") " pod="openstack/dnsmasq-dns-57d769cc4f-lh52s" Nov 24 01:27:03 crc kubenswrapper[4755]: I1124 01:27:03.926627 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/88fbee01-1663-4ae9-9776-84ad70bfc066-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-lh52s\" (UID: \"88fbee01-1663-4ae9-9776-84ad70bfc066\") " pod="openstack/dnsmasq-dns-57d769cc4f-lh52s" Nov 24 01:27:03 crc kubenswrapper[4755]: I1124 01:27:03.926682 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jx8pm\" (UniqueName: \"kubernetes.io/projected/88fbee01-1663-4ae9-9776-84ad70bfc066-kube-api-access-jx8pm\") pod \"dnsmasq-dns-57d769cc4f-lh52s\" (UID: \"88fbee01-1663-4ae9-9776-84ad70bfc066\") " pod="openstack/dnsmasq-dns-57d769cc4f-lh52s" Nov 24 01:27:03 crc kubenswrapper[4755]: I1124 01:27:03.926710 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88fbee01-1663-4ae9-9776-84ad70bfc066-config\") pod \"dnsmasq-dns-57d769cc4f-lh52s\" (UID: \"88fbee01-1663-4ae9-9776-84ad70bfc066\") " pod="openstack/dnsmasq-dns-57d769cc4f-lh52s" Nov 24 01:27:03 crc kubenswrapper[4755]: I1124 01:27:03.928117 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/88fbee01-1663-4ae9-9776-84ad70bfc066-config\") pod \"dnsmasq-dns-57d769cc4f-lh52s\" (UID: \"88fbee01-1663-4ae9-9776-84ad70bfc066\") " pod="openstack/dnsmasq-dns-57d769cc4f-lh52s" Nov 24 01:27:03 crc kubenswrapper[4755]: I1124 01:27:03.931221 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/88fbee01-1663-4ae9-9776-84ad70bfc066-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-lh52s\" (UID: \"88fbee01-1663-4ae9-9776-84ad70bfc066\") " pod="openstack/dnsmasq-dns-57d769cc4f-lh52s" Nov 24 01:27:03 crc kubenswrapper[4755]: I1124 01:27:03.951271 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jx8pm\" (UniqueName: \"kubernetes.io/projected/88fbee01-1663-4ae9-9776-84ad70bfc066-kube-api-access-jx8pm\") pod \"dnsmasq-dns-57d769cc4f-lh52s\" (UID: \"88fbee01-1663-4ae9-9776-84ad70bfc066\") " pod="openstack/dnsmasq-dns-57d769cc4f-lh52s" Nov 24 01:27:03 crc kubenswrapper[4755]: I1124 01:27:03.992984 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-lh52s" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.209407 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-rw7mx"] Nov 24 01:27:04 crc kubenswrapper[4755]: W1124 01:27:04.222024 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda6a2b41a_a90f_4468_9df6_d81d680baa4b.slice/crio-dd53e334b4055785f8375d111adf52d1c4ca8ae1927e87bfdf2f27a9ff23ead7 WatchSource:0}: Error finding container dd53e334b4055785f8375d111adf52d1c4ca8ae1927e87bfdf2f27a9ff23ead7: Status 404 returned error can't find the container with id dd53e334b4055785f8375d111adf52d1c4ca8ae1927e87bfdf2f27a9ff23ead7 Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.502580 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-rw7mx" event={"ID":"a6a2b41a-a90f-4468-9df6-d81d680baa4b","Type":"ContainerStarted","Data":"dd53e334b4055785f8375d111adf52d1c4ca8ae1927e87bfdf2f27a9ff23ead7"} Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.523230 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-lh52s"] Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.537850 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.539036 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.542344 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.544790 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.544826 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.544860 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-g5576" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.545951 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.552025 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.552235 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.552410 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.636061 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-config-data\") pod \"rabbitmq-server-0\" (UID: \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\") " pod="openstack/rabbitmq-server-0" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.636131 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\") " pod="openstack/rabbitmq-server-0" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.636191 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5q2wb\" (UniqueName: \"kubernetes.io/projected/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-kube-api-access-5q2wb\") pod \"rabbitmq-server-0\" (UID: \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\") " pod="openstack/rabbitmq-server-0" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.636216 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\") " pod="openstack/rabbitmq-server-0" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.636240 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\") " pod="openstack/rabbitmq-server-0" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.636259 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\") " pod="openstack/rabbitmq-server-0" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.636274 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-server-conf\") pod \"rabbitmq-server-0\" (UID: \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\") " pod="openstack/rabbitmq-server-0" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.636295 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\") " pod="openstack/rabbitmq-server-0" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.636324 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\") " pod="openstack/rabbitmq-server-0" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.636347 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\") " pod="openstack/rabbitmq-server-0" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.636363 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-pod-info\") pod \"rabbitmq-server-0\" (UID: \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\") " pod="openstack/rabbitmq-server-0" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.737203 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\") " pod="openstack/rabbitmq-server-0" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.737258 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5q2wb\" (UniqueName: \"kubernetes.io/projected/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-kube-api-access-5q2wb\") pod \"rabbitmq-server-0\" (UID: \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\") " pod="openstack/rabbitmq-server-0" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.737288 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\") " pod="openstack/rabbitmq-server-0" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.737314 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\") " 
pod="openstack/rabbitmq-server-0" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.737335 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\") " pod="openstack/rabbitmq-server-0" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.737351 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-server-conf\") pod \"rabbitmq-server-0\" (UID: \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\") " pod="openstack/rabbitmq-server-0" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.737371 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\") " pod="openstack/rabbitmq-server-0" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.737398 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\") " pod="openstack/rabbitmq-server-0" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.737423 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\") " pod="openstack/rabbitmq-server-0" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.737451 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-pod-info\") pod \"rabbitmq-server-0\" (UID: \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\") " pod="openstack/rabbitmq-server-0" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.737482 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-config-data\") pod \"rabbitmq-server-0\" (UID: \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\") " pod="openstack/rabbitmq-server-0" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.738186 4755 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/rabbitmq-server-0" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.738432 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\") " pod="openstack/rabbitmq-server-0" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.738707 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-config-data\") pod \"rabbitmq-server-0\" (UID: \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\") " pod="openstack/rabbitmq-server-0" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.738723 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\") " pod="openstack/rabbitmq-server-0" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.739756 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\") " pod="openstack/rabbitmq-server-0" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.740392 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-server-conf\") pod \"rabbitmq-server-0\" (UID: \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\") " pod="openstack/rabbitmq-server-0" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.746414 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-pod-info\") pod \"rabbitmq-server-0\" (UID: \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\") " pod="openstack/rabbitmq-server-0" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.746488 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\") " pod="openstack/rabbitmq-server-0" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.747029 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\") " pod="openstack/rabbitmq-server-0" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.747585 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\") " pod="openstack/rabbitmq-server-0" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.753697 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5q2wb\" (UniqueName: \"kubernetes.io/projected/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-kube-api-access-5q2wb\") pod \"rabbitmq-server-0\" (UID: \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\") " pod="openstack/rabbitmq-server-0" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.763386 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\") " pod="openstack/rabbitmq-server-0" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.804183 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] 
Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.805848 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.810361 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.810516 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.810581 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.810711 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.810814 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.810981 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-dv59m" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.812758 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.816952 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.876647 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.940049 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/20a66507-c5f4-43d2-a99b-18daaffea30f-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"20a66507-c5f4-43d2-a99b-18daaffea30f\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.940350 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/20a66507-c5f4-43d2-a99b-18daaffea30f-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"20a66507-c5f4-43d2-a99b-18daaffea30f\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.940381 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/20a66507-c5f4-43d2-a99b-18daaffea30f-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"20a66507-c5f4-43d2-a99b-18daaffea30f\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.940414 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/20a66507-c5f4-43d2-a99b-18daaffea30f-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"20a66507-c5f4-43d2-a99b-18daaffea30f\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.940435 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lz5ws\" (UniqueName: 
\"kubernetes.io/projected/20a66507-c5f4-43d2-a99b-18daaffea30f-kube-api-access-lz5ws\") pod \"rabbitmq-cell1-server-0\" (UID: \"20a66507-c5f4-43d2-a99b-18daaffea30f\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.940450 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/20a66507-c5f4-43d2-a99b-18daaffea30f-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"20a66507-c5f4-43d2-a99b-18daaffea30f\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.940674 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/20a66507-c5f4-43d2-a99b-18daaffea30f-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"20a66507-c5f4-43d2-a99b-18daaffea30f\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.940749 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/20a66507-c5f4-43d2-a99b-18daaffea30f-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"20a66507-c5f4-43d2-a99b-18daaffea30f\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.940848 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/20a66507-c5f4-43d2-a99b-18daaffea30f-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"20a66507-c5f4-43d2-a99b-18daaffea30f\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.940934 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"20a66507-c5f4-43d2-a99b-18daaffea30f\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:27:04 crc kubenswrapper[4755]: I1124 01:27:04.940979 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/20a66507-c5f4-43d2-a99b-18daaffea30f-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"20a66507-c5f4-43d2-a99b-18daaffea30f\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:27:05 crc kubenswrapper[4755]: I1124 01:27:05.042840 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/20a66507-c5f4-43d2-a99b-18daaffea30f-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"20a66507-c5f4-43d2-a99b-18daaffea30f\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:27:05 crc kubenswrapper[4755]: I1124 01:27:05.042912 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/20a66507-c5f4-43d2-a99b-18daaffea30f-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"20a66507-c5f4-43d2-a99b-18daaffea30f\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:27:05 crc kubenswrapper[4755]: I1124 01:27:05.042945 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: 
\"kubernetes.io/downward-api/20a66507-c5f4-43d2-a99b-18daaffea30f-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"20a66507-c5f4-43d2-a99b-18daaffea30f\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:27:05 crc kubenswrapper[4755]: I1124 01:27:05.042972 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/20a66507-c5f4-43d2-a99b-18daaffea30f-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"20a66507-c5f4-43d2-a99b-18daaffea30f\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:27:05 crc kubenswrapper[4755]: I1124 01:27:05.043132 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/20a66507-c5f4-43d2-a99b-18daaffea30f-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"20a66507-c5f4-43d2-a99b-18daaffea30f\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:27:05 crc kubenswrapper[4755]: I1124 01:27:05.043230 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lz5ws\" (UniqueName: \"kubernetes.io/projected/20a66507-c5f4-43d2-a99b-18daaffea30f-kube-api-access-lz5ws\") pod \"rabbitmq-cell1-server-0\" (UID: \"20a66507-c5f4-43d2-a99b-18daaffea30f\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:27:05 crc kubenswrapper[4755]: I1124 01:27:05.043348 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/20a66507-c5f4-43d2-a99b-18daaffea30f-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"20a66507-c5f4-43d2-a99b-18daaffea30f\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:27:05 crc kubenswrapper[4755]: I1124 01:27:05.043403 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/20a66507-c5f4-43d2-a99b-18daaffea30f-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"20a66507-c5f4-43d2-a99b-18daaffea30f\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:27:05 crc kubenswrapper[4755]: I1124 01:27:05.043428 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/20a66507-c5f4-43d2-a99b-18daaffea30f-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"20a66507-c5f4-43d2-a99b-18daaffea30f\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:27:05 crc kubenswrapper[4755]: I1124 01:27:05.043465 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/20a66507-c5f4-43d2-a99b-18daaffea30f-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"20a66507-c5f4-43d2-a99b-18daaffea30f\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:27:05 crc kubenswrapper[4755]: I1124 01:27:05.043676 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/20a66507-c5f4-43d2-a99b-18daaffea30f-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"20a66507-c5f4-43d2-a99b-18daaffea30f\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:27:05 crc kubenswrapper[4755]: I1124 01:27:05.044014 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/20a66507-c5f4-43d2-a99b-18daaffea30f-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"20a66507-c5f4-43d2-a99b-18daaffea30f\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:27:05 crc kubenswrapper[4755]: I1124 01:27:05.044197 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"20a66507-c5f4-43d2-a99b-18daaffea30f\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:27:05 crc kubenswrapper[4755]: I1124 01:27:05.044227 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/20a66507-c5f4-43d2-a99b-18daaffea30f-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"20a66507-c5f4-43d2-a99b-18daaffea30f\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:27:05 crc kubenswrapper[4755]: I1124 01:27:05.044527 4755 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"20a66507-c5f4-43d2-a99b-18daaffea30f\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:27:05 crc kubenswrapper[4755]: I1124 01:27:05.045382 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/20a66507-c5f4-43d2-a99b-18daaffea30f-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"20a66507-c5f4-43d2-a99b-18daaffea30f\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:27:05 crc kubenswrapper[4755]: I1124 01:27:05.045708 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/20a66507-c5f4-43d2-a99b-18daaffea30f-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"20a66507-c5f4-43d2-a99b-18daaffea30f\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:27:05 crc kubenswrapper[4755]: I1124 01:27:05.047898 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/20a66507-c5f4-43d2-a99b-18daaffea30f-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"20a66507-c5f4-43d2-a99b-18daaffea30f\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:27:05 crc kubenswrapper[4755]: I1124 01:27:05.048764 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/20a66507-c5f4-43d2-a99b-18daaffea30f-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"20a66507-c5f4-43d2-a99b-18daaffea30f\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:27:05 crc kubenswrapper[4755]: I1124 01:27:05.049717 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/20a66507-c5f4-43d2-a99b-18daaffea30f-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"20a66507-c5f4-43d2-a99b-18daaffea30f\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:27:05 crc kubenswrapper[4755]: I1124 01:27:05.065795 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/20a66507-c5f4-43d2-a99b-18daaffea30f-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"20a66507-c5f4-43d2-a99b-18daaffea30f\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:27:05 crc kubenswrapper[4755]: I1124 01:27:05.066797 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-lz5ws\" (UniqueName: \"kubernetes.io/projected/20a66507-c5f4-43d2-a99b-18daaffea30f-kube-api-access-lz5ws\") pod \"rabbitmq-cell1-server-0\" (UID: \"20a66507-c5f4-43d2-a99b-18daaffea30f\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:27:05 crc kubenswrapper[4755]: I1124 01:27:05.084761 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"20a66507-c5f4-43d2-a99b-18daaffea30f\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:27:05 crc kubenswrapper[4755]: I1124 01:27:05.136332 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:27:06 crc kubenswrapper[4755]: I1124 01:27:06.488077 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Nov 24 01:27:06 crc kubenswrapper[4755]: I1124 01:27:06.491073 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Nov 24 01:27:06 crc kubenswrapper[4755]: I1124 01:27:06.494671 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Nov 24 01:27:06 crc kubenswrapper[4755]: I1124 01:27:06.494702 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Nov 24 01:27:06 crc kubenswrapper[4755]: I1124 01:27:06.495010 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-h9jbn" Nov 24 01:27:06 crc kubenswrapper[4755]: I1124 01:27:06.495137 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Nov 24 01:27:06 crc kubenswrapper[4755]: I1124 01:27:06.496972 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Nov 24 01:27:06 crc kubenswrapper[4755]: I1124 01:27:06.500911 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Nov 24 01:27:06 crc kubenswrapper[4755]: I1124 01:27:06.567499 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/22215216-efac-4810-90f1-4d42ccc6399c-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"22215216-efac-4810-90f1-4d42ccc6399c\") " pod="openstack/openstack-galera-0" Nov 24 01:27:06 crc kubenswrapper[4755]: I1124 01:27:06.567932 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-galera-0\" (UID: \"22215216-efac-4810-90f1-4d42ccc6399c\") " pod="openstack/openstack-galera-0" Nov 24 01:27:06 crc kubenswrapper[4755]: I1124 01:27:06.567973 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/22215216-efac-4810-90f1-4d42ccc6399c-config-data-default\") pod \"openstack-galera-0\" (UID: \"22215216-efac-4810-90f1-4d42ccc6399c\") " pod="openstack/openstack-galera-0" Nov 24 01:27:06 crc kubenswrapper[4755]: I1124 01:27:06.568037 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/22215216-efac-4810-90f1-4d42ccc6399c-config-data-generated\") pod 
\"openstack-galera-0\" (UID: \"22215216-efac-4810-90f1-4d42ccc6399c\") " pod="openstack/openstack-galera-0" Nov 24 01:27:06 crc kubenswrapper[4755]: I1124 01:27:06.568059 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/22215216-efac-4810-90f1-4d42ccc6399c-operator-scripts\") pod \"openstack-galera-0\" (UID: \"22215216-efac-4810-90f1-4d42ccc6399c\") " pod="openstack/openstack-galera-0" Nov 24 01:27:06 crc kubenswrapper[4755]: I1124 01:27:06.568099 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/22215216-efac-4810-90f1-4d42ccc6399c-kolla-config\") pod \"openstack-galera-0\" (UID: \"22215216-efac-4810-90f1-4d42ccc6399c\") " pod="openstack/openstack-galera-0" Nov 24 01:27:06 crc kubenswrapper[4755]: I1124 01:27:06.568140 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22215216-efac-4810-90f1-4d42ccc6399c-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"22215216-efac-4810-90f1-4d42ccc6399c\") " pod="openstack/openstack-galera-0" Nov 24 01:27:06 crc kubenswrapper[4755]: I1124 01:27:06.568200 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-899vq\" (UniqueName: \"kubernetes.io/projected/22215216-efac-4810-90f1-4d42ccc6399c-kube-api-access-899vq\") pod \"openstack-galera-0\" (UID: \"22215216-efac-4810-90f1-4d42ccc6399c\") " pod="openstack/openstack-galera-0" Nov 24 01:27:06 crc kubenswrapper[4755]: I1124 01:27:06.669669 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-899vq\" (UniqueName: \"kubernetes.io/projected/22215216-efac-4810-90f1-4d42ccc6399c-kube-api-access-899vq\") pod \"openstack-galera-0\" (UID: \"22215216-efac-4810-90f1-4d42ccc6399c\") " pod="openstack/openstack-galera-0" Nov 24 01:27:06 crc kubenswrapper[4755]: I1124 01:27:06.669749 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/22215216-efac-4810-90f1-4d42ccc6399c-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"22215216-efac-4810-90f1-4d42ccc6399c\") " pod="openstack/openstack-galera-0" Nov 24 01:27:06 crc kubenswrapper[4755]: I1124 01:27:06.669779 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-galera-0\" (UID: \"22215216-efac-4810-90f1-4d42ccc6399c\") " pod="openstack/openstack-galera-0" Nov 24 01:27:06 crc kubenswrapper[4755]: I1124 01:27:06.669813 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/22215216-efac-4810-90f1-4d42ccc6399c-config-data-default\") pod \"openstack-galera-0\" (UID: \"22215216-efac-4810-90f1-4d42ccc6399c\") " pod="openstack/openstack-galera-0" Nov 24 01:27:06 crc kubenswrapper[4755]: I1124 01:27:06.669836 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/22215216-efac-4810-90f1-4d42ccc6399c-config-data-generated\") pod \"openstack-galera-0\" (UID: \"22215216-efac-4810-90f1-4d42ccc6399c\") " pod="openstack/openstack-galera-0" Nov 24 01:27:06 
crc kubenswrapper[4755]: I1124 01:27:06.669856 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/22215216-efac-4810-90f1-4d42ccc6399c-operator-scripts\") pod \"openstack-galera-0\" (UID: \"22215216-efac-4810-90f1-4d42ccc6399c\") " pod="openstack/openstack-galera-0" Nov 24 01:27:06 crc kubenswrapper[4755]: I1124 01:27:06.669892 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/22215216-efac-4810-90f1-4d42ccc6399c-kolla-config\") pod \"openstack-galera-0\" (UID: \"22215216-efac-4810-90f1-4d42ccc6399c\") " pod="openstack/openstack-galera-0" Nov 24 01:27:06 crc kubenswrapper[4755]: I1124 01:27:06.669930 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22215216-efac-4810-90f1-4d42ccc6399c-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"22215216-efac-4810-90f1-4d42ccc6399c\") " pod="openstack/openstack-galera-0" Nov 24 01:27:06 crc kubenswrapper[4755]: I1124 01:27:06.669994 4755 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-galera-0\" (UID: \"22215216-efac-4810-90f1-4d42ccc6399c\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/openstack-galera-0" Nov 24 01:27:06 crc kubenswrapper[4755]: I1124 01:27:06.670442 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/22215216-efac-4810-90f1-4d42ccc6399c-config-data-generated\") pod \"openstack-galera-0\" (UID: \"22215216-efac-4810-90f1-4d42ccc6399c\") " pod="openstack/openstack-galera-0" Nov 24 01:27:06 crc kubenswrapper[4755]: I1124 01:27:06.670833 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/22215216-efac-4810-90f1-4d42ccc6399c-kolla-config\") pod \"openstack-galera-0\" (UID: \"22215216-efac-4810-90f1-4d42ccc6399c\") " pod="openstack/openstack-galera-0" Nov 24 01:27:06 crc kubenswrapper[4755]: I1124 01:27:06.671258 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/22215216-efac-4810-90f1-4d42ccc6399c-config-data-default\") pod \"openstack-galera-0\" (UID: \"22215216-efac-4810-90f1-4d42ccc6399c\") " pod="openstack/openstack-galera-0" Nov 24 01:27:06 crc kubenswrapper[4755]: I1124 01:27:06.671843 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/22215216-efac-4810-90f1-4d42ccc6399c-operator-scripts\") pod \"openstack-galera-0\" (UID: \"22215216-efac-4810-90f1-4d42ccc6399c\") " pod="openstack/openstack-galera-0" Nov 24 01:27:06 crc kubenswrapper[4755]: I1124 01:27:06.679678 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22215216-efac-4810-90f1-4d42ccc6399c-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"22215216-efac-4810-90f1-4d42ccc6399c\") " pod="openstack/openstack-galera-0" Nov 24 01:27:06 crc kubenswrapper[4755]: I1124 01:27:06.686796 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod 
\"openstack-galera-0\" (UID: \"22215216-efac-4810-90f1-4d42ccc6399c\") " pod="openstack/openstack-galera-0" Nov 24 01:27:06 crc kubenswrapper[4755]: I1124 01:27:06.688045 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-899vq\" (UniqueName: \"kubernetes.io/projected/22215216-efac-4810-90f1-4d42ccc6399c-kube-api-access-899vq\") pod \"openstack-galera-0\" (UID: \"22215216-efac-4810-90f1-4d42ccc6399c\") " pod="openstack/openstack-galera-0" Nov 24 01:27:06 crc kubenswrapper[4755]: I1124 01:27:06.688247 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/22215216-efac-4810-90f1-4d42ccc6399c-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"22215216-efac-4810-90f1-4d42ccc6399c\") " pod="openstack/openstack-galera-0" Nov 24 01:27:06 crc kubenswrapper[4755]: I1124 01:27:06.817445 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Nov 24 01:27:07 crc kubenswrapper[4755]: I1124 01:27:07.536191 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-lh52s" event={"ID":"88fbee01-1663-4ae9-9776-84ad70bfc066","Type":"ContainerStarted","Data":"21ad5199fa54bd65e0db5e0e84bfc5456fc7edfd7a09c60aac5ecdfc56ff0c00"} Nov 24 01:27:07 crc kubenswrapper[4755]: I1124 01:27:07.840916 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Nov 24 01:27:07 crc kubenswrapper[4755]: I1124 01:27:07.842504 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Nov 24 01:27:07 crc kubenswrapper[4755]: I1124 01:27:07.845959 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-7xg8c" Nov 24 01:27:07 crc kubenswrapper[4755]: I1124 01:27:07.846180 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Nov 24 01:27:07 crc kubenswrapper[4755]: I1124 01:27:07.846291 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Nov 24 01:27:07 crc kubenswrapper[4755]: I1124 01:27:07.849125 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Nov 24 01:27:07 crc kubenswrapper[4755]: I1124 01:27:07.850830 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Nov 24 01:27:07 crc kubenswrapper[4755]: I1124 01:27:07.895371 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b8f9a57-22fa-4115-942f-e6f7343a78e4-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"0b8f9a57-22fa-4115-942f-e6f7343a78e4\") " pod="openstack/openstack-cell1-galera-0" Nov 24 01:27:07 crc kubenswrapper[4755]: I1124 01:27:07.895417 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b8f9a57-22fa-4115-942f-e6f7343a78e4-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"0b8f9a57-22fa-4115-942f-e6f7343a78e4\") " pod="openstack/openstack-cell1-galera-0" Nov 24 01:27:07 crc kubenswrapper[4755]: I1124 01:27:07.895448 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: 
\"kubernetes.io/configmap/0b8f9a57-22fa-4115-942f-e6f7343a78e4-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"0b8f9a57-22fa-4115-942f-e6f7343a78e4\") " pod="openstack/openstack-cell1-galera-0" Nov 24 01:27:07 crc kubenswrapper[4755]: I1124 01:27:07.895494 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sqvg2\" (UniqueName: \"kubernetes.io/projected/0b8f9a57-22fa-4115-942f-e6f7343a78e4-kube-api-access-sqvg2\") pod \"openstack-cell1-galera-0\" (UID: \"0b8f9a57-22fa-4115-942f-e6f7343a78e4\") " pod="openstack/openstack-cell1-galera-0" Nov 24 01:27:07 crc kubenswrapper[4755]: I1124 01:27:07.895532 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/0b8f9a57-22fa-4115-942f-e6f7343a78e4-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"0b8f9a57-22fa-4115-942f-e6f7343a78e4\") " pod="openstack/openstack-cell1-galera-0" Nov 24 01:27:07 crc kubenswrapper[4755]: I1124 01:27:07.895558 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0b8f9a57-22fa-4115-942f-e6f7343a78e4-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"0b8f9a57-22fa-4115-942f-e6f7343a78e4\") " pod="openstack/openstack-cell1-galera-0" Nov 24 01:27:07 crc kubenswrapper[4755]: I1124 01:27:07.895580 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/0b8f9a57-22fa-4115-942f-e6f7343a78e4-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"0b8f9a57-22fa-4115-942f-e6f7343a78e4\") " pod="openstack/openstack-cell1-galera-0" Nov 24 01:27:07 crc kubenswrapper[4755]: I1124 01:27:07.895619 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-cell1-galera-0\" (UID: \"0b8f9a57-22fa-4115-942f-e6f7343a78e4\") " pod="openstack/openstack-cell1-galera-0" Nov 24 01:27:07 crc kubenswrapper[4755]: I1124 01:27:07.998155 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/0b8f9a57-22fa-4115-942f-e6f7343a78e4-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"0b8f9a57-22fa-4115-942f-e6f7343a78e4\") " pod="openstack/openstack-cell1-galera-0" Nov 24 01:27:07 crc kubenswrapper[4755]: I1124 01:27:07.998240 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0b8f9a57-22fa-4115-942f-e6f7343a78e4-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"0b8f9a57-22fa-4115-942f-e6f7343a78e4\") " pod="openstack/openstack-cell1-galera-0" Nov 24 01:27:07 crc kubenswrapper[4755]: I1124 01:27:07.998278 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/0b8f9a57-22fa-4115-942f-e6f7343a78e4-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"0b8f9a57-22fa-4115-942f-e6f7343a78e4\") " pod="openstack/openstack-cell1-galera-0" Nov 24 01:27:07 crc kubenswrapper[4755]: I1124 01:27:07.998398 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-cell1-galera-0\" (UID: \"0b8f9a57-22fa-4115-942f-e6f7343a78e4\") " pod="openstack/openstack-cell1-galera-0" Nov 24 01:27:07 crc kubenswrapper[4755]: I1124 01:27:07.998439 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b8f9a57-22fa-4115-942f-e6f7343a78e4-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"0b8f9a57-22fa-4115-942f-e6f7343a78e4\") " pod="openstack/openstack-cell1-galera-0" Nov 24 01:27:07 crc kubenswrapper[4755]: I1124 01:27:07.998473 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b8f9a57-22fa-4115-942f-e6f7343a78e4-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"0b8f9a57-22fa-4115-942f-e6f7343a78e4\") " pod="openstack/openstack-cell1-galera-0" Nov 24 01:27:07 crc kubenswrapper[4755]: I1124 01:27:07.998515 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/0b8f9a57-22fa-4115-942f-e6f7343a78e4-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"0b8f9a57-22fa-4115-942f-e6f7343a78e4\") " pod="openstack/openstack-cell1-galera-0" Nov 24 01:27:07 crc kubenswrapper[4755]: I1124 01:27:07.998581 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sqvg2\" (UniqueName: \"kubernetes.io/projected/0b8f9a57-22fa-4115-942f-e6f7343a78e4-kube-api-access-sqvg2\") pod \"openstack-cell1-galera-0\" (UID: \"0b8f9a57-22fa-4115-942f-e6f7343a78e4\") " pod="openstack/openstack-cell1-galera-0" Nov 24 01:27:07 crc kubenswrapper[4755]: I1124 01:27:07.998846 4755 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-cell1-galera-0\" (UID: \"0b8f9a57-22fa-4115-942f-e6f7343a78e4\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/openstack-cell1-galera-0" Nov 24 01:27:07 crc kubenswrapper[4755]: I1124 01:27:07.999341 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/0b8f9a57-22fa-4115-942f-e6f7343a78e4-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"0b8f9a57-22fa-4115-942f-e6f7343a78e4\") " pod="openstack/openstack-cell1-galera-0" Nov 24 01:27:07 crc kubenswrapper[4755]: I1124 01:27:07.999339 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/0b8f9a57-22fa-4115-942f-e6f7343a78e4-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"0b8f9a57-22fa-4115-942f-e6f7343a78e4\") " pod="openstack/openstack-cell1-galera-0" Nov 24 01:27:08 crc kubenswrapper[4755]: I1124 01:27:07.999980 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/0b8f9a57-22fa-4115-942f-e6f7343a78e4-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"0b8f9a57-22fa-4115-942f-e6f7343a78e4\") " pod="openstack/openstack-cell1-galera-0" Nov 24 01:27:08 crc kubenswrapper[4755]: I1124 01:27:08.000110 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0b8f9a57-22fa-4115-942f-e6f7343a78e4-operator-scripts\") pod 
\"openstack-cell1-galera-0\" (UID: \"0b8f9a57-22fa-4115-942f-e6f7343a78e4\") " pod="openstack/openstack-cell1-galera-0" Nov 24 01:27:08 crc kubenswrapper[4755]: I1124 01:27:08.003987 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b8f9a57-22fa-4115-942f-e6f7343a78e4-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"0b8f9a57-22fa-4115-942f-e6f7343a78e4\") " pod="openstack/openstack-cell1-galera-0" Nov 24 01:27:08 crc kubenswrapper[4755]: I1124 01:27:08.005541 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b8f9a57-22fa-4115-942f-e6f7343a78e4-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"0b8f9a57-22fa-4115-942f-e6f7343a78e4\") " pod="openstack/openstack-cell1-galera-0" Nov 24 01:27:08 crc kubenswrapper[4755]: I1124 01:27:08.023082 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sqvg2\" (UniqueName: \"kubernetes.io/projected/0b8f9a57-22fa-4115-942f-e6f7343a78e4-kube-api-access-sqvg2\") pod \"openstack-cell1-galera-0\" (UID: \"0b8f9a57-22fa-4115-942f-e6f7343a78e4\") " pod="openstack/openstack-cell1-galera-0" Nov 24 01:27:08 crc kubenswrapper[4755]: I1124 01:27:08.032744 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-cell1-galera-0\" (UID: \"0b8f9a57-22fa-4115-942f-e6f7343a78e4\") " pod="openstack/openstack-cell1-galera-0" Nov 24 01:27:08 crc kubenswrapper[4755]: I1124 01:27:08.165754 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Nov 24 01:27:08 crc kubenswrapper[4755]: I1124 01:27:08.174447 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Nov 24 01:27:08 crc kubenswrapper[4755]: I1124 01:27:08.175642 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Nov 24 01:27:08 crc kubenswrapper[4755]: I1124 01:27:08.177625 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-82rf4" Nov 24 01:27:08 crc kubenswrapper[4755]: I1124 01:27:08.178811 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Nov 24 01:27:08 crc kubenswrapper[4755]: I1124 01:27:08.179991 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Nov 24 01:27:08 crc kubenswrapper[4755]: I1124 01:27:08.188194 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Nov 24 01:27:08 crc kubenswrapper[4755]: I1124 01:27:08.302901 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa97a0b2-add8-4532-ab38-d726de9f0a60-combined-ca-bundle\") pod \"memcached-0\" (UID: \"fa97a0b2-add8-4532-ab38-d726de9f0a60\") " pod="openstack/memcached-0" Nov 24 01:27:08 crc kubenswrapper[4755]: I1124 01:27:08.302984 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9m6kz\" (UniqueName: \"kubernetes.io/projected/fa97a0b2-add8-4532-ab38-d726de9f0a60-kube-api-access-9m6kz\") pod \"memcached-0\" (UID: \"fa97a0b2-add8-4532-ab38-d726de9f0a60\") " pod="openstack/memcached-0" Nov 24 01:27:08 crc kubenswrapper[4755]: I1124 01:27:08.303059 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa97a0b2-add8-4532-ab38-d726de9f0a60-memcached-tls-certs\") pod \"memcached-0\" (UID: \"fa97a0b2-add8-4532-ab38-d726de9f0a60\") " pod="openstack/memcached-0" Nov 24 01:27:08 crc kubenswrapper[4755]: I1124 01:27:08.303199 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/fa97a0b2-add8-4532-ab38-d726de9f0a60-kolla-config\") pod \"memcached-0\" (UID: \"fa97a0b2-add8-4532-ab38-d726de9f0a60\") " pod="openstack/memcached-0" Nov 24 01:27:08 crc kubenswrapper[4755]: I1124 01:27:08.303286 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fa97a0b2-add8-4532-ab38-d726de9f0a60-config-data\") pod \"memcached-0\" (UID: \"fa97a0b2-add8-4532-ab38-d726de9f0a60\") " pod="openstack/memcached-0" Nov 24 01:27:08 crc kubenswrapper[4755]: I1124 01:27:08.404135 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa97a0b2-add8-4532-ab38-d726de9f0a60-combined-ca-bundle\") pod \"memcached-0\" (UID: \"fa97a0b2-add8-4532-ab38-d726de9f0a60\") " pod="openstack/memcached-0" Nov 24 01:27:08 crc kubenswrapper[4755]: I1124 01:27:08.404211 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9m6kz\" (UniqueName: \"kubernetes.io/projected/fa97a0b2-add8-4532-ab38-d726de9f0a60-kube-api-access-9m6kz\") pod \"memcached-0\" (UID: \"fa97a0b2-add8-4532-ab38-d726de9f0a60\") " pod="openstack/memcached-0" Nov 24 01:27:08 crc kubenswrapper[4755]: I1124 01:27:08.404251 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/fa97a0b2-add8-4532-ab38-d726de9f0a60-memcached-tls-certs\") pod \"memcached-0\" (UID: \"fa97a0b2-add8-4532-ab38-d726de9f0a60\") " pod="openstack/memcached-0" Nov 24 01:27:08 crc kubenswrapper[4755]: I1124 01:27:08.404285 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/fa97a0b2-add8-4532-ab38-d726de9f0a60-kolla-config\") pod \"memcached-0\" (UID: \"fa97a0b2-add8-4532-ab38-d726de9f0a60\") " pod="openstack/memcached-0" Nov 24 01:27:08 crc kubenswrapper[4755]: I1124 01:27:08.404366 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fa97a0b2-add8-4532-ab38-d726de9f0a60-config-data\") pod \"memcached-0\" (UID: \"fa97a0b2-add8-4532-ab38-d726de9f0a60\") " pod="openstack/memcached-0" Nov 24 01:27:08 crc kubenswrapper[4755]: I1124 01:27:08.405366 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fa97a0b2-add8-4532-ab38-d726de9f0a60-config-data\") pod \"memcached-0\" (UID: \"fa97a0b2-add8-4532-ab38-d726de9f0a60\") " pod="openstack/memcached-0" Nov 24 01:27:08 crc kubenswrapper[4755]: I1124 01:27:08.405456 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/fa97a0b2-add8-4532-ab38-d726de9f0a60-kolla-config\") pod \"memcached-0\" (UID: \"fa97a0b2-add8-4532-ab38-d726de9f0a60\") " pod="openstack/memcached-0" Nov 24 01:27:08 crc kubenswrapper[4755]: I1124 01:27:08.410841 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa97a0b2-add8-4532-ab38-d726de9f0a60-memcached-tls-certs\") pod \"memcached-0\" (UID: \"fa97a0b2-add8-4532-ab38-d726de9f0a60\") " pod="openstack/memcached-0" Nov 24 01:27:08 crc kubenswrapper[4755]: I1124 01:27:08.411137 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa97a0b2-add8-4532-ab38-d726de9f0a60-combined-ca-bundle\") pod \"memcached-0\" (UID: \"fa97a0b2-add8-4532-ab38-d726de9f0a60\") " pod="openstack/memcached-0" Nov 24 01:27:08 crc kubenswrapper[4755]: I1124 01:27:08.423142 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9m6kz\" (UniqueName: \"kubernetes.io/projected/fa97a0b2-add8-4532-ab38-d726de9f0a60-kube-api-access-9m6kz\") pod \"memcached-0\" (UID: \"fa97a0b2-add8-4532-ab38-d726de9f0a60\") " pod="openstack/memcached-0" Nov 24 01:27:08 crc kubenswrapper[4755]: I1124 01:27:08.497464 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Nov 24 01:27:09 crc kubenswrapper[4755]: I1124 01:27:09.941958 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Nov 24 01:27:09 crc kubenswrapper[4755]: I1124 01:27:09.943285 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Nov 24 01:27:09 crc kubenswrapper[4755]: I1124 01:27:09.951467 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-8mplf" Nov 24 01:27:09 crc kubenswrapper[4755]: I1124 01:27:09.960431 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 24 01:27:10 crc kubenswrapper[4755]: I1124 01:27:10.029122 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cwzn9\" (UniqueName: \"kubernetes.io/projected/907891d0-296d-4b4b-a3f3-867979467a98-kube-api-access-cwzn9\") pod \"kube-state-metrics-0\" (UID: \"907891d0-296d-4b4b-a3f3-867979467a98\") " pod="openstack/kube-state-metrics-0" Nov 24 01:27:10 crc kubenswrapper[4755]: I1124 01:27:10.130256 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cwzn9\" (UniqueName: \"kubernetes.io/projected/907891d0-296d-4b4b-a3f3-867979467a98-kube-api-access-cwzn9\") pod \"kube-state-metrics-0\" (UID: \"907891d0-296d-4b4b-a3f3-867979467a98\") " pod="openstack/kube-state-metrics-0" Nov 24 01:27:10 crc kubenswrapper[4755]: I1124 01:27:10.156472 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cwzn9\" (UniqueName: \"kubernetes.io/projected/907891d0-296d-4b4b-a3f3-867979467a98-kube-api-access-cwzn9\") pod \"kube-state-metrics-0\" (UID: \"907891d0-296d-4b4b-a3f3-867979467a98\") " pod="openstack/kube-state-metrics-0" Nov 24 01:27:10 crc kubenswrapper[4755]: I1124 01:27:10.263692 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.275559 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-7tzgl"] Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.277988 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-7tzgl" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.279925 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-cksfc" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.280129 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.284360 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-7tzgl"] Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.285404 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.303689 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-khjj5"] Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.306870 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-khjj5" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.309428 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-khjj5"] Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.386230 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/3e8faee1-2ae1-4f03-9379-d35e533f222d-etc-ovs\") pod \"ovn-controller-ovs-khjj5\" (UID: \"3e8faee1-2ae1-4f03-9379-d35e533f222d\") " pod="openstack/ovn-controller-ovs-khjj5" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.386292 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/06d5bee3-77f6-4f08-bfc6-6c9cf6f0bdc0-ovn-controller-tls-certs\") pod \"ovn-controller-7tzgl\" (UID: \"06d5bee3-77f6-4f08-bfc6-6c9cf6f0bdc0\") " pod="openstack/ovn-controller-7tzgl" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.386339 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/06d5bee3-77f6-4f08-bfc6-6c9cf6f0bdc0-var-log-ovn\") pod \"ovn-controller-7tzgl\" (UID: \"06d5bee3-77f6-4f08-bfc6-6c9cf6f0bdc0\") " pod="openstack/ovn-controller-7tzgl" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.386364 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/3e8faee1-2ae1-4f03-9379-d35e533f222d-var-log\") pod \"ovn-controller-ovs-khjj5\" (UID: \"3e8faee1-2ae1-4f03-9379-d35e533f222d\") " pod="openstack/ovn-controller-ovs-khjj5" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.386383 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/3e8faee1-2ae1-4f03-9379-d35e533f222d-var-lib\") pod \"ovn-controller-ovs-khjj5\" (UID: \"3e8faee1-2ae1-4f03-9379-d35e533f222d\") " pod="openstack/ovn-controller-ovs-khjj5" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.386414 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-shk6j\" (UniqueName: \"kubernetes.io/projected/06d5bee3-77f6-4f08-bfc6-6c9cf6f0bdc0-kube-api-access-shk6j\") pod \"ovn-controller-7tzgl\" (UID: \"06d5bee3-77f6-4f08-bfc6-6c9cf6f0bdc0\") " pod="openstack/ovn-controller-7tzgl" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.386445 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/06d5bee3-77f6-4f08-bfc6-6c9cf6f0bdc0-var-run-ovn\") pod \"ovn-controller-7tzgl\" (UID: \"06d5bee3-77f6-4f08-bfc6-6c9cf6f0bdc0\") " pod="openstack/ovn-controller-7tzgl" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.386472 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/06d5bee3-77f6-4f08-bfc6-6c9cf6f0bdc0-var-run\") pod \"ovn-controller-7tzgl\" (UID: \"06d5bee3-77f6-4f08-bfc6-6c9cf6f0bdc0\") " pod="openstack/ovn-controller-7tzgl" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.386494 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/06d5bee3-77f6-4f08-bfc6-6c9cf6f0bdc0-combined-ca-bundle\") pod \"ovn-controller-7tzgl\" (UID: \"06d5bee3-77f6-4f08-bfc6-6c9cf6f0bdc0\") " pod="openstack/ovn-controller-7tzgl" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.386518 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mprk5\" (UniqueName: \"kubernetes.io/projected/3e8faee1-2ae1-4f03-9379-d35e533f222d-kube-api-access-mprk5\") pod \"ovn-controller-ovs-khjj5\" (UID: \"3e8faee1-2ae1-4f03-9379-d35e533f222d\") " pod="openstack/ovn-controller-ovs-khjj5" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.386552 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3e8faee1-2ae1-4f03-9379-d35e533f222d-scripts\") pod \"ovn-controller-ovs-khjj5\" (UID: \"3e8faee1-2ae1-4f03-9379-d35e533f222d\") " pod="openstack/ovn-controller-ovs-khjj5" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.386644 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/3e8faee1-2ae1-4f03-9379-d35e533f222d-var-run\") pod \"ovn-controller-ovs-khjj5\" (UID: \"3e8faee1-2ae1-4f03-9379-d35e533f222d\") " pod="openstack/ovn-controller-ovs-khjj5" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.386672 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/06d5bee3-77f6-4f08-bfc6-6c9cf6f0bdc0-scripts\") pod \"ovn-controller-7tzgl\" (UID: \"06d5bee3-77f6-4f08-bfc6-6c9cf6f0bdc0\") " pod="openstack/ovn-controller-7tzgl" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.488018 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/3e8faee1-2ae1-4f03-9379-d35e533f222d-var-run\") pod \"ovn-controller-ovs-khjj5\" (UID: \"3e8faee1-2ae1-4f03-9379-d35e533f222d\") " pod="openstack/ovn-controller-ovs-khjj5" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.488068 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/06d5bee3-77f6-4f08-bfc6-6c9cf6f0bdc0-scripts\") pod \"ovn-controller-7tzgl\" (UID: \"06d5bee3-77f6-4f08-bfc6-6c9cf6f0bdc0\") " pod="openstack/ovn-controller-7tzgl" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.488095 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/3e8faee1-2ae1-4f03-9379-d35e533f222d-etc-ovs\") pod \"ovn-controller-ovs-khjj5\" (UID: \"3e8faee1-2ae1-4f03-9379-d35e533f222d\") " pod="openstack/ovn-controller-ovs-khjj5" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.488118 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/06d5bee3-77f6-4f08-bfc6-6c9cf6f0bdc0-ovn-controller-tls-certs\") pod \"ovn-controller-7tzgl\" (UID: \"06d5bee3-77f6-4f08-bfc6-6c9cf6f0bdc0\") " pod="openstack/ovn-controller-7tzgl" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.488143 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/06d5bee3-77f6-4f08-bfc6-6c9cf6f0bdc0-var-log-ovn\") pod \"ovn-controller-7tzgl\" (UID: 
\"06d5bee3-77f6-4f08-bfc6-6c9cf6f0bdc0\") " pod="openstack/ovn-controller-7tzgl" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.488158 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/3e8faee1-2ae1-4f03-9379-d35e533f222d-var-lib\") pod \"ovn-controller-ovs-khjj5\" (UID: \"3e8faee1-2ae1-4f03-9379-d35e533f222d\") " pod="openstack/ovn-controller-ovs-khjj5" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.488173 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/3e8faee1-2ae1-4f03-9379-d35e533f222d-var-log\") pod \"ovn-controller-ovs-khjj5\" (UID: \"3e8faee1-2ae1-4f03-9379-d35e533f222d\") " pod="openstack/ovn-controller-ovs-khjj5" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.488189 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-shk6j\" (UniqueName: \"kubernetes.io/projected/06d5bee3-77f6-4f08-bfc6-6c9cf6f0bdc0-kube-api-access-shk6j\") pod \"ovn-controller-7tzgl\" (UID: \"06d5bee3-77f6-4f08-bfc6-6c9cf6f0bdc0\") " pod="openstack/ovn-controller-7tzgl" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.488212 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/06d5bee3-77f6-4f08-bfc6-6c9cf6f0bdc0-var-run-ovn\") pod \"ovn-controller-7tzgl\" (UID: \"06d5bee3-77f6-4f08-bfc6-6c9cf6f0bdc0\") " pod="openstack/ovn-controller-7tzgl" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.488232 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/06d5bee3-77f6-4f08-bfc6-6c9cf6f0bdc0-var-run\") pod \"ovn-controller-7tzgl\" (UID: \"06d5bee3-77f6-4f08-bfc6-6c9cf6f0bdc0\") " pod="openstack/ovn-controller-7tzgl" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.488252 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06d5bee3-77f6-4f08-bfc6-6c9cf6f0bdc0-combined-ca-bundle\") pod \"ovn-controller-7tzgl\" (UID: \"06d5bee3-77f6-4f08-bfc6-6c9cf6f0bdc0\") " pod="openstack/ovn-controller-7tzgl" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.488268 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mprk5\" (UniqueName: \"kubernetes.io/projected/3e8faee1-2ae1-4f03-9379-d35e533f222d-kube-api-access-mprk5\") pod \"ovn-controller-ovs-khjj5\" (UID: \"3e8faee1-2ae1-4f03-9379-d35e533f222d\") " pod="openstack/ovn-controller-ovs-khjj5" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.488292 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3e8faee1-2ae1-4f03-9379-d35e533f222d-scripts\") pod \"ovn-controller-ovs-khjj5\" (UID: \"3e8faee1-2ae1-4f03-9379-d35e533f222d\") " pod="openstack/ovn-controller-ovs-khjj5" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.488686 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/3e8faee1-2ae1-4f03-9379-d35e533f222d-var-run\") pod \"ovn-controller-ovs-khjj5\" (UID: \"3e8faee1-2ae1-4f03-9379-d35e533f222d\") " pod="openstack/ovn-controller-ovs-khjj5" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.488865 4755 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/3e8faee1-2ae1-4f03-9379-d35e533f222d-var-log\") pod \"ovn-controller-ovs-khjj5\" (UID: \"3e8faee1-2ae1-4f03-9379-d35e533f222d\") " pod="openstack/ovn-controller-ovs-khjj5" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.489008 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/3e8faee1-2ae1-4f03-9379-d35e533f222d-etc-ovs\") pod \"ovn-controller-ovs-khjj5\" (UID: \"3e8faee1-2ae1-4f03-9379-d35e533f222d\") " pod="openstack/ovn-controller-ovs-khjj5" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.489831 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/06d5bee3-77f6-4f08-bfc6-6c9cf6f0bdc0-var-run\") pod \"ovn-controller-7tzgl\" (UID: \"06d5bee3-77f6-4f08-bfc6-6c9cf6f0bdc0\") " pod="openstack/ovn-controller-7tzgl" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.490177 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/06d5bee3-77f6-4f08-bfc6-6c9cf6f0bdc0-var-run-ovn\") pod \"ovn-controller-7tzgl\" (UID: \"06d5bee3-77f6-4f08-bfc6-6c9cf6f0bdc0\") " pod="openstack/ovn-controller-7tzgl" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.490298 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/06d5bee3-77f6-4f08-bfc6-6c9cf6f0bdc0-scripts\") pod \"ovn-controller-7tzgl\" (UID: \"06d5bee3-77f6-4f08-bfc6-6c9cf6f0bdc0\") " pod="openstack/ovn-controller-7tzgl" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.490330 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/06d5bee3-77f6-4f08-bfc6-6c9cf6f0bdc0-var-log-ovn\") pod \"ovn-controller-7tzgl\" (UID: \"06d5bee3-77f6-4f08-bfc6-6c9cf6f0bdc0\") " pod="openstack/ovn-controller-7tzgl" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.490444 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/3e8faee1-2ae1-4f03-9379-d35e533f222d-var-lib\") pod \"ovn-controller-ovs-khjj5\" (UID: \"3e8faee1-2ae1-4f03-9379-d35e533f222d\") " pod="openstack/ovn-controller-ovs-khjj5" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.490901 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3e8faee1-2ae1-4f03-9379-d35e533f222d-scripts\") pod \"ovn-controller-ovs-khjj5\" (UID: \"3e8faee1-2ae1-4f03-9379-d35e533f222d\") " pod="openstack/ovn-controller-ovs-khjj5" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.514698 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06d5bee3-77f6-4f08-bfc6-6c9cf6f0bdc0-combined-ca-bundle\") pod \"ovn-controller-7tzgl\" (UID: \"06d5bee3-77f6-4f08-bfc6-6c9cf6f0bdc0\") " pod="openstack/ovn-controller-7tzgl" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.514712 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/06d5bee3-77f6-4f08-bfc6-6c9cf6f0bdc0-ovn-controller-tls-certs\") pod \"ovn-controller-7tzgl\" (UID: \"06d5bee3-77f6-4f08-bfc6-6c9cf6f0bdc0\") " pod="openstack/ovn-controller-7tzgl" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.520667 4755 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-shk6j\" (UniqueName: \"kubernetes.io/projected/06d5bee3-77f6-4f08-bfc6-6c9cf6f0bdc0-kube-api-access-shk6j\") pod \"ovn-controller-7tzgl\" (UID: \"06d5bee3-77f6-4f08-bfc6-6c9cf6f0bdc0\") " pod="openstack/ovn-controller-7tzgl" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.521231 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mprk5\" (UniqueName: \"kubernetes.io/projected/3e8faee1-2ae1-4f03-9379-d35e533f222d-kube-api-access-mprk5\") pod \"ovn-controller-ovs-khjj5\" (UID: \"3e8faee1-2ae1-4f03-9379-d35e533f222d\") " pod="openstack/ovn-controller-ovs-khjj5" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.602199 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-7tzgl" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.624208 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-khjj5" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.865430 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.867408 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.873324 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.873616 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.873712 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.873622 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.873918 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-7cqpv" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.879397 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.997630 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9b8b513e-db84-49e7-88e5-b023b20bd604-config\") pod \"ovsdbserver-nb-0\" (UID: \"9b8b513e-db84-49e7-88e5-b023b20bd604\") " pod="openstack/ovsdbserver-nb-0" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.997755 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b8b513e-db84-49e7-88e5-b023b20bd604-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"9b8b513e-db84-49e7-88e5-b023b20bd604\") " pod="openstack/ovsdbserver-nb-0" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.997832 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"9b8b513e-db84-49e7-88e5-b023b20bd604\") " pod="openstack/ovsdbserver-nb-0" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 
01:27:13.997895 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qzgjp\" (UniqueName: \"kubernetes.io/projected/9b8b513e-db84-49e7-88e5-b023b20bd604-kube-api-access-qzgjp\") pod \"ovsdbserver-nb-0\" (UID: \"9b8b513e-db84-49e7-88e5-b023b20bd604\") " pod="openstack/ovsdbserver-nb-0" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.997941 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9b8b513e-db84-49e7-88e5-b023b20bd604-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"9b8b513e-db84-49e7-88e5-b023b20bd604\") " pod="openstack/ovsdbserver-nb-0" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.997969 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9b8b513e-db84-49e7-88e5-b023b20bd604-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"9b8b513e-db84-49e7-88e5-b023b20bd604\") " pod="openstack/ovsdbserver-nb-0" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.998046 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9b8b513e-db84-49e7-88e5-b023b20bd604-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"9b8b513e-db84-49e7-88e5-b023b20bd604\") " pod="openstack/ovsdbserver-nb-0" Nov 24 01:27:13 crc kubenswrapper[4755]: I1124 01:27:13.998089 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9b8b513e-db84-49e7-88e5-b023b20bd604-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"9b8b513e-db84-49e7-88e5-b023b20bd604\") " pod="openstack/ovsdbserver-nb-0" Nov 24 01:27:14 crc kubenswrapper[4755]: I1124 01:27:14.099228 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9b8b513e-db84-49e7-88e5-b023b20bd604-config\") pod \"ovsdbserver-nb-0\" (UID: \"9b8b513e-db84-49e7-88e5-b023b20bd604\") " pod="openstack/ovsdbserver-nb-0" Nov 24 01:27:14 crc kubenswrapper[4755]: I1124 01:27:14.099289 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b8b513e-db84-49e7-88e5-b023b20bd604-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"9b8b513e-db84-49e7-88e5-b023b20bd604\") " pod="openstack/ovsdbserver-nb-0" Nov 24 01:27:14 crc kubenswrapper[4755]: I1124 01:27:14.099345 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"9b8b513e-db84-49e7-88e5-b023b20bd604\") " pod="openstack/ovsdbserver-nb-0" Nov 24 01:27:14 crc kubenswrapper[4755]: I1124 01:27:14.099375 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qzgjp\" (UniqueName: \"kubernetes.io/projected/9b8b513e-db84-49e7-88e5-b023b20bd604-kube-api-access-qzgjp\") pod \"ovsdbserver-nb-0\" (UID: \"9b8b513e-db84-49e7-88e5-b023b20bd604\") " pod="openstack/ovsdbserver-nb-0" Nov 24 01:27:14 crc kubenswrapper[4755]: I1124 01:27:14.099395 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/9b8b513e-db84-49e7-88e5-b023b20bd604-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"9b8b513e-db84-49e7-88e5-b023b20bd604\") " pod="openstack/ovsdbserver-nb-0" Nov 24 01:27:14 crc kubenswrapper[4755]: I1124 01:27:14.099416 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9b8b513e-db84-49e7-88e5-b023b20bd604-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"9b8b513e-db84-49e7-88e5-b023b20bd604\") " pod="openstack/ovsdbserver-nb-0" Nov 24 01:27:14 crc kubenswrapper[4755]: I1124 01:27:14.099456 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9b8b513e-db84-49e7-88e5-b023b20bd604-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"9b8b513e-db84-49e7-88e5-b023b20bd604\") " pod="openstack/ovsdbserver-nb-0" Nov 24 01:27:14 crc kubenswrapper[4755]: I1124 01:27:14.099472 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9b8b513e-db84-49e7-88e5-b023b20bd604-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"9b8b513e-db84-49e7-88e5-b023b20bd604\") " pod="openstack/ovsdbserver-nb-0" Nov 24 01:27:14 crc kubenswrapper[4755]: I1124 01:27:14.100330 4755 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"9b8b513e-db84-49e7-88e5-b023b20bd604\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/ovsdbserver-nb-0" Nov 24 01:27:14 crc kubenswrapper[4755]: I1124 01:27:14.100468 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9b8b513e-db84-49e7-88e5-b023b20bd604-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"9b8b513e-db84-49e7-88e5-b023b20bd604\") " pod="openstack/ovsdbserver-nb-0" Nov 24 01:27:14 crc kubenswrapper[4755]: I1124 01:27:14.100707 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9b8b513e-db84-49e7-88e5-b023b20bd604-config\") pod \"ovsdbserver-nb-0\" (UID: \"9b8b513e-db84-49e7-88e5-b023b20bd604\") " pod="openstack/ovsdbserver-nb-0" Nov 24 01:27:14 crc kubenswrapper[4755]: I1124 01:27:14.101552 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9b8b513e-db84-49e7-88e5-b023b20bd604-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"9b8b513e-db84-49e7-88e5-b023b20bd604\") " pod="openstack/ovsdbserver-nb-0" Nov 24 01:27:14 crc kubenswrapper[4755]: I1124 01:27:14.104665 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b8b513e-db84-49e7-88e5-b023b20bd604-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"9b8b513e-db84-49e7-88e5-b023b20bd604\") " pod="openstack/ovsdbserver-nb-0" Nov 24 01:27:14 crc kubenswrapper[4755]: I1124 01:27:14.108203 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9b8b513e-db84-49e7-88e5-b023b20bd604-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"9b8b513e-db84-49e7-88e5-b023b20bd604\") " pod="openstack/ovsdbserver-nb-0" Nov 24 01:27:14 crc kubenswrapper[4755]: I1124 01:27:14.109324 4755 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9b8b513e-db84-49e7-88e5-b023b20bd604-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"9b8b513e-db84-49e7-88e5-b023b20bd604\") " pod="openstack/ovsdbserver-nb-0" Nov 24 01:27:14 crc kubenswrapper[4755]: I1124 01:27:14.116778 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qzgjp\" (UniqueName: \"kubernetes.io/projected/9b8b513e-db84-49e7-88e5-b023b20bd604-kube-api-access-qzgjp\") pod \"ovsdbserver-nb-0\" (UID: \"9b8b513e-db84-49e7-88e5-b023b20bd604\") " pod="openstack/ovsdbserver-nb-0" Nov 24 01:27:14 crc kubenswrapper[4755]: I1124 01:27:14.119155 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"9b8b513e-db84-49e7-88e5-b023b20bd604\") " pod="openstack/ovsdbserver-nb-0" Nov 24 01:27:14 crc kubenswrapper[4755]: I1124 01:27:14.201050 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Nov 24 01:27:17 crc kubenswrapper[4755]: I1124 01:27:17.193643 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Nov 24 01:27:17 crc kubenswrapper[4755]: I1124 01:27:17.198111 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Nov 24 01:27:17 crc kubenswrapper[4755]: I1124 01:27:17.203724 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-q8s5c" Nov 24 01:27:17 crc kubenswrapper[4755]: I1124 01:27:17.203895 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Nov 24 01:27:17 crc kubenswrapper[4755]: I1124 01:27:17.204073 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Nov 24 01:27:17 crc kubenswrapper[4755]: I1124 01:27:17.204324 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Nov 24 01:27:17 crc kubenswrapper[4755]: I1124 01:27:17.204451 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Nov 24 01:27:17 crc kubenswrapper[4755]: I1124 01:27:17.241791 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-sb-0\" (UID: \"1fdb8eaf-1302-4fff-a38f-673a89890e64\") " pod="openstack/ovsdbserver-sb-0" Nov 24 01:27:17 crc kubenswrapper[4755]: I1124 01:27:17.241838 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/1fdb8eaf-1302-4fff-a38f-673a89890e64-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"1fdb8eaf-1302-4fff-a38f-673a89890e64\") " pod="openstack/ovsdbserver-sb-0" Nov 24 01:27:17 crc kubenswrapper[4755]: I1124 01:27:17.241884 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1fdb8eaf-1302-4fff-a38f-673a89890e64-config\") pod \"ovsdbserver-sb-0\" (UID: \"1fdb8eaf-1302-4fff-a38f-673a89890e64\") " pod="openstack/ovsdbserver-sb-0" Nov 24 01:27:17 crc kubenswrapper[4755]: I1124 01:27:17.242135 4755 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1fdb8eaf-1302-4fff-a38f-673a89890e64-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"1fdb8eaf-1302-4fff-a38f-673a89890e64\") " pod="openstack/ovsdbserver-sb-0" Nov 24 01:27:17 crc kubenswrapper[4755]: I1124 01:27:17.242168 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1fdb8eaf-1302-4fff-a38f-673a89890e64-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"1fdb8eaf-1302-4fff-a38f-673a89890e64\") " pod="openstack/ovsdbserver-sb-0" Nov 24 01:27:17 crc kubenswrapper[4755]: I1124 01:27:17.242206 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-62zhc\" (UniqueName: \"kubernetes.io/projected/1fdb8eaf-1302-4fff-a38f-673a89890e64-kube-api-access-62zhc\") pod \"ovsdbserver-sb-0\" (UID: \"1fdb8eaf-1302-4fff-a38f-673a89890e64\") " pod="openstack/ovsdbserver-sb-0" Nov 24 01:27:17 crc kubenswrapper[4755]: I1124 01:27:17.242255 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1fdb8eaf-1302-4fff-a38f-673a89890e64-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"1fdb8eaf-1302-4fff-a38f-673a89890e64\") " pod="openstack/ovsdbserver-sb-0" Nov 24 01:27:17 crc kubenswrapper[4755]: I1124 01:27:17.242291 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fdb8eaf-1302-4fff-a38f-673a89890e64-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"1fdb8eaf-1302-4fff-a38f-673a89890e64\") " pod="openstack/ovsdbserver-sb-0" Nov 24 01:27:17 crc kubenswrapper[4755]: I1124 01:27:17.343539 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1fdb8eaf-1302-4fff-a38f-673a89890e64-config\") pod \"ovsdbserver-sb-0\" (UID: \"1fdb8eaf-1302-4fff-a38f-673a89890e64\") " pod="openstack/ovsdbserver-sb-0" Nov 24 01:27:17 crc kubenswrapper[4755]: I1124 01:27:17.344314 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1fdb8eaf-1302-4fff-a38f-673a89890e64-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"1fdb8eaf-1302-4fff-a38f-673a89890e64\") " pod="openstack/ovsdbserver-sb-0" Nov 24 01:27:17 crc kubenswrapper[4755]: I1124 01:27:17.344631 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1fdb8eaf-1302-4fff-a38f-673a89890e64-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"1fdb8eaf-1302-4fff-a38f-673a89890e64\") " pod="openstack/ovsdbserver-sb-0" Nov 24 01:27:17 crc kubenswrapper[4755]: I1124 01:27:17.344735 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1fdb8eaf-1302-4fff-a38f-673a89890e64-config\") pod \"ovsdbserver-sb-0\" (UID: \"1fdb8eaf-1302-4fff-a38f-673a89890e64\") " pod="openstack/ovsdbserver-sb-0" Nov 24 01:27:17 crc kubenswrapper[4755]: I1124 01:27:17.344739 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-62zhc\" (UniqueName: \"kubernetes.io/projected/1fdb8eaf-1302-4fff-a38f-673a89890e64-kube-api-access-62zhc\") pod 
\"ovsdbserver-sb-0\" (UID: \"1fdb8eaf-1302-4fff-a38f-673a89890e64\") " pod="openstack/ovsdbserver-sb-0" Nov 24 01:27:17 crc kubenswrapper[4755]: I1124 01:27:17.344855 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1fdb8eaf-1302-4fff-a38f-673a89890e64-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"1fdb8eaf-1302-4fff-a38f-673a89890e64\") " pod="openstack/ovsdbserver-sb-0" Nov 24 01:27:17 crc kubenswrapper[4755]: I1124 01:27:17.344901 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fdb8eaf-1302-4fff-a38f-673a89890e64-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"1fdb8eaf-1302-4fff-a38f-673a89890e64\") " pod="openstack/ovsdbserver-sb-0" Nov 24 01:27:17 crc kubenswrapper[4755]: I1124 01:27:17.344951 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-sb-0\" (UID: \"1fdb8eaf-1302-4fff-a38f-673a89890e64\") " pod="openstack/ovsdbserver-sb-0" Nov 24 01:27:17 crc kubenswrapper[4755]: I1124 01:27:17.344976 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/1fdb8eaf-1302-4fff-a38f-673a89890e64-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"1fdb8eaf-1302-4fff-a38f-673a89890e64\") " pod="openstack/ovsdbserver-sb-0" Nov 24 01:27:17 crc kubenswrapper[4755]: I1124 01:27:17.345386 4755 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-sb-0\" (UID: \"1fdb8eaf-1302-4fff-a38f-673a89890e64\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/ovsdbserver-sb-0" Nov 24 01:27:17 crc kubenswrapper[4755]: I1124 01:27:17.345991 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1fdb8eaf-1302-4fff-a38f-673a89890e64-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"1fdb8eaf-1302-4fff-a38f-673a89890e64\") " pod="openstack/ovsdbserver-sb-0" Nov 24 01:27:17 crc kubenswrapper[4755]: I1124 01:27:17.356315 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/1fdb8eaf-1302-4fff-a38f-673a89890e64-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"1fdb8eaf-1302-4fff-a38f-673a89890e64\") " pod="openstack/ovsdbserver-sb-0" Nov 24 01:27:17 crc kubenswrapper[4755]: I1124 01:27:17.356991 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1fdb8eaf-1302-4fff-a38f-673a89890e64-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"1fdb8eaf-1302-4fff-a38f-673a89890e64\") " pod="openstack/ovsdbserver-sb-0" Nov 24 01:27:17 crc kubenswrapper[4755]: I1124 01:27:17.358685 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fdb8eaf-1302-4fff-a38f-673a89890e64-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"1fdb8eaf-1302-4fff-a38f-673a89890e64\") " pod="openstack/ovsdbserver-sb-0" Nov 24 01:27:17 crc kubenswrapper[4755]: I1124 01:27:17.365360 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-62zhc\" 
(UniqueName: \"kubernetes.io/projected/1fdb8eaf-1302-4fff-a38f-673a89890e64-kube-api-access-62zhc\") pod \"ovsdbserver-sb-0\" (UID: \"1fdb8eaf-1302-4fff-a38f-673a89890e64\") " pod="openstack/ovsdbserver-sb-0" Nov 24 01:27:17 crc kubenswrapper[4755]: I1124 01:27:17.372629 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-sb-0\" (UID: \"1fdb8eaf-1302-4fff-a38f-673a89890e64\") " pod="openstack/ovsdbserver-sb-0" Nov 24 01:27:17 crc kubenswrapper[4755]: I1124 01:27:17.381740 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1fdb8eaf-1302-4fff-a38f-673a89890e64-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"1fdb8eaf-1302-4fff-a38f-673a89890e64\") " pod="openstack/ovsdbserver-sb-0" Nov 24 01:27:17 crc kubenswrapper[4755]: I1124 01:27:17.561829 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Nov 24 01:27:18 crc kubenswrapper[4755]: E1124 01:27:18.048738 4755 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Nov 24 01:27:18 crc kubenswrapper[4755]: E1124 01:27:18.049234 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7tvln,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start 
failed in pod dnsmasq-dns-78dd6ddcc-mn9d6_openstack(1a3ac5d9-abf5-430d-9d26-e6a308a6b1d7): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 24 01:27:18 crc kubenswrapper[4755]: E1124 01:27:18.050499 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-mn9d6" podUID="1a3ac5d9-abf5-430d-9d26-e6a308a6b1d7" Nov 24 01:27:18 crc kubenswrapper[4755]: E1124 01:27:18.071936 4755 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Nov 24 01:27:18 crc kubenswrapper[4755]: E1124 01:27:18.072489 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-fpvd2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-nmftn_openstack(61197ade-de23-4903-bb62-438e07080dbd): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 24 01:27:18 crc kubenswrapper[4755]: E1124 01:27:18.073729 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-nmftn" podUID="61197ade-de23-4903-bb62-438e07080dbd" Nov 24 01:27:18 crc kubenswrapper[4755]: I1124 01:27:18.632144 4755 generic.go:334] "Generic (PLEG): container finished" 
podID="88fbee01-1663-4ae9-9776-84ad70bfc066" containerID="58f4f1823db24042f6994b07dd09e54dbefb168323a5330e38fd89ec71f32f8e" exitCode=0 Nov 24 01:27:18 crc kubenswrapper[4755]: I1124 01:27:18.632979 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-lh52s" event={"ID":"88fbee01-1663-4ae9-9776-84ad70bfc066","Type":"ContainerDied","Data":"58f4f1823db24042f6994b07dd09e54dbefb168323a5330e38fd89ec71f32f8e"} Nov 24 01:27:18 crc kubenswrapper[4755]: I1124 01:27:18.635370 4755 generic.go:334] "Generic (PLEG): container finished" podID="a6a2b41a-a90f-4468-9df6-d81d680baa4b" containerID="66ae556633beb5cae195bfe7128ba7333e7988ea68d3eb51368e4a2fc5c38b16" exitCode=0 Nov 24 01:27:18 crc kubenswrapper[4755]: I1124 01:27:18.635534 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-rw7mx" event={"ID":"a6a2b41a-a90f-4468-9df6-d81d680baa4b","Type":"ContainerDied","Data":"66ae556633beb5cae195bfe7128ba7333e7988ea68d3eb51368e4a2fc5c38b16"} Nov 24 01:27:18 crc kubenswrapper[4755]: I1124 01:27:18.718883 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Nov 24 01:27:18 crc kubenswrapper[4755]: I1124 01:27:18.738871 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 24 01:27:18 crc kubenswrapper[4755]: I1124 01:27:18.744626 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Nov 24 01:27:18 crc kubenswrapper[4755]: I1124 01:27:18.751337 4755 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 24 01:27:18 crc kubenswrapper[4755]: W1124 01:27:18.759234 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod22215216_efac_4810_90f1_4d42ccc6399c.slice/crio-fdce2f9300e570539f840b7465625762aa95102f30e2103b56f1e1ba3d527748 WatchSource:0}: Error finding container fdce2f9300e570539f840b7465625762aa95102f30e2103b56f1e1ba3d527748: Status 404 returned error can't find the container with id fdce2f9300e570539f840b7465625762aa95102f30e2103b56f1e1ba3d527748 Nov 24 01:27:19 crc kubenswrapper[4755]: I1124 01:27:19.087076 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-mn9d6" Nov 24 01:27:19 crc kubenswrapper[4755]: I1124 01:27:19.088862 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 24 01:27:19 crc kubenswrapper[4755]: I1124 01:27:19.102170 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-nmftn" Nov 24 01:27:19 crc kubenswrapper[4755]: I1124 01:27:19.102410 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Nov 24 01:27:19 crc kubenswrapper[4755]: W1124 01:27:19.115210 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfa97a0b2_add8_4532_ab38_d726de9f0a60.slice/crio-8f875e46ec7064337a9ab581e20c3ce8261c8fe85620f0eb863116b20de9a481 WatchSource:0}: Error finding container 8f875e46ec7064337a9ab581e20c3ce8261c8fe85620f0eb863116b20de9a481: Status 404 returned error can't find the container with id 8f875e46ec7064337a9ab581e20c3ce8261c8fe85620f0eb863116b20de9a481 Nov 24 01:27:19 crc kubenswrapper[4755]: I1124 01:27:19.135819 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Nov 24 01:27:19 crc kubenswrapper[4755]: I1124 01:27:19.150048 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-7tzgl"] Nov 24 01:27:19 crc kubenswrapper[4755]: I1124 01:27:19.175243 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a3ac5d9-abf5-430d-9d26-e6a308a6b1d7-config\") pod \"1a3ac5d9-abf5-430d-9d26-e6a308a6b1d7\" (UID: \"1a3ac5d9-abf5-430d-9d26-e6a308a6b1d7\") " Nov 24 01:27:19 crc kubenswrapper[4755]: I1124 01:27:19.175357 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7tvln\" (UniqueName: \"kubernetes.io/projected/1a3ac5d9-abf5-430d-9d26-e6a308a6b1d7-kube-api-access-7tvln\") pod \"1a3ac5d9-abf5-430d-9d26-e6a308a6b1d7\" (UID: \"1a3ac5d9-abf5-430d-9d26-e6a308a6b1d7\") " Nov 24 01:27:19 crc kubenswrapper[4755]: I1124 01:27:19.175449 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a3ac5d9-abf5-430d-9d26-e6a308a6b1d7-dns-svc\") pod \"1a3ac5d9-abf5-430d-9d26-e6a308a6b1d7\" (UID: \"1a3ac5d9-abf5-430d-9d26-e6a308a6b1d7\") " Nov 24 01:27:19 crc kubenswrapper[4755]: I1124 01:27:19.175866 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a3ac5d9-abf5-430d-9d26-e6a308a6b1d7-config" (OuterVolumeSpecName: "config") pod "1a3ac5d9-abf5-430d-9d26-e6a308a6b1d7" (UID: "1a3ac5d9-abf5-430d-9d26-e6a308a6b1d7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:27:19 crc kubenswrapper[4755]: I1124 01:27:19.176743 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a3ac5d9-abf5-430d-9d26-e6a308a6b1d7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1a3ac5d9-abf5-430d-9d26-e6a308a6b1d7" (UID: "1a3ac5d9-abf5-430d-9d26-e6a308a6b1d7"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:27:19 crc kubenswrapper[4755]: I1124 01:27:19.179804 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a3ac5d9-abf5-430d-9d26-e6a308a6b1d7-kube-api-access-7tvln" (OuterVolumeSpecName: "kube-api-access-7tvln") pod "1a3ac5d9-abf5-430d-9d26-e6a308a6b1d7" (UID: "1a3ac5d9-abf5-430d-9d26-e6a308a6b1d7"). InnerVolumeSpecName "kube-api-access-7tvln". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:27:19 crc kubenswrapper[4755]: I1124 01:27:19.276436 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fpvd2\" (UniqueName: \"kubernetes.io/projected/61197ade-de23-4903-bb62-438e07080dbd-kube-api-access-fpvd2\") pod \"61197ade-de23-4903-bb62-438e07080dbd\" (UID: \"61197ade-de23-4903-bb62-438e07080dbd\") " Nov 24 01:27:19 crc kubenswrapper[4755]: I1124 01:27:19.276566 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/61197ade-de23-4903-bb62-438e07080dbd-config\") pod \"61197ade-de23-4903-bb62-438e07080dbd\" (UID: \"61197ade-de23-4903-bb62-438e07080dbd\") " Nov 24 01:27:19 crc kubenswrapper[4755]: I1124 01:27:19.276971 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7tvln\" (UniqueName: \"kubernetes.io/projected/1a3ac5d9-abf5-430d-9d26-e6a308a6b1d7-kube-api-access-7tvln\") on node \"crc\" DevicePath \"\"" Nov 24 01:27:19 crc kubenswrapper[4755]: I1124 01:27:19.276992 4755 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a3ac5d9-abf5-430d-9d26-e6a308a6b1d7-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 24 01:27:19 crc kubenswrapper[4755]: I1124 01:27:19.277003 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a3ac5d9-abf5-430d-9d26-e6a308a6b1d7-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:27:19 crc kubenswrapper[4755]: I1124 01:27:19.277339 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/61197ade-de23-4903-bb62-438e07080dbd-config" (OuterVolumeSpecName: "config") pod "61197ade-de23-4903-bb62-438e07080dbd" (UID: "61197ade-de23-4903-bb62-438e07080dbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:27:19 crc kubenswrapper[4755]: I1124 01:27:19.280425 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61197ade-de23-4903-bb62-438e07080dbd-kube-api-access-fpvd2" (OuterVolumeSpecName: "kube-api-access-fpvd2") pod "61197ade-de23-4903-bb62-438e07080dbd" (UID: "61197ade-de23-4903-bb62-438e07080dbd"). InnerVolumeSpecName "kube-api-access-fpvd2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:27:19 crc kubenswrapper[4755]: I1124 01:27:19.379023 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/61197ade-de23-4903-bb62-438e07080dbd-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:27:19 crc kubenswrapper[4755]: I1124 01:27:19.379337 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fpvd2\" (UniqueName: \"kubernetes.io/projected/61197ade-de23-4903-bb62-438e07080dbd-kube-api-access-fpvd2\") on node \"crc\" DevicePath \"\"" Nov 24 01:27:19 crc kubenswrapper[4755]: I1124 01:27:19.440445 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Nov 24 01:27:19 crc kubenswrapper[4755]: W1124 01:27:19.444248 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9b8b513e_db84_49e7_88e5_b023b20bd604.slice/crio-b91eb5d3ef35729232834f3003be68720bc50ae7629e41226acff573fb0e767c WatchSource:0}: Error finding container b91eb5d3ef35729232834f3003be68720bc50ae7629e41226acff573fb0e767c: Status 404 returned error can't find the container with id b91eb5d3ef35729232834f3003be68720bc50ae7629e41226acff573fb0e767c Nov 24 01:27:19 crc kubenswrapper[4755]: I1124 01:27:19.644429 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"907891d0-296d-4b4b-a3f3-867979467a98","Type":"ContainerStarted","Data":"afda6f8db1d66ff731d489ae5f5c2f8026bb97f42cd3745e3f28ac71ca3d0f0e"} Nov 24 01:27:19 crc kubenswrapper[4755]: I1124 01:27:19.647381 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-rw7mx" event={"ID":"a6a2b41a-a90f-4468-9df6-d81d680baa4b","Type":"ContainerStarted","Data":"24cb744c328dac81fa43959f08f99d41986c385e5e7cd7ae15137f70a798ec6e"} Nov 24 01:27:19 crc kubenswrapper[4755]: I1124 01:27:19.647544 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-666b6646f7-rw7mx" Nov 24 01:27:19 crc kubenswrapper[4755]: I1124 01:27:19.649202 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-nmftn" Nov 24 01:27:19 crc kubenswrapper[4755]: I1124 01:27:19.649635 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-nmftn" event={"ID":"61197ade-de23-4903-bb62-438e07080dbd","Type":"ContainerDied","Data":"44837af499a730391a424139e9a9648bbb7881a84136a366cbcefb9a1edfd4e1"} Nov 24 01:27:19 crc kubenswrapper[4755]: I1124 01:27:19.652072 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"20a66507-c5f4-43d2-a99b-18daaffea30f","Type":"ContainerStarted","Data":"28f549736fa37cb8177eef2aa573ddffb24e3240a66e3c3f3bb3369c7fea986f"} Nov 24 01:27:19 crc kubenswrapper[4755]: I1124 01:27:19.654070 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"9b8b513e-db84-49e7-88e5-b023b20bd604","Type":"ContainerStarted","Data":"b91eb5d3ef35729232834f3003be68720bc50ae7629e41226acff573fb0e767c"} Nov 24 01:27:19 crc kubenswrapper[4755]: I1124 01:27:19.655702 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-7tzgl" event={"ID":"06d5bee3-77f6-4f08-bfc6-6c9cf6f0bdc0","Type":"ContainerStarted","Data":"664147e797a1024f7221ff3c0a51b630547b721e9999e8cb9d81d9c17d63c2fd"} Nov 24 01:27:19 crc kubenswrapper[4755]: I1124 01:27:19.657180 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed","Type":"ContainerStarted","Data":"74cde227ef5aca9f07445f6af776d7c21b58982666cbb772844715c08e07d776"} Nov 24 01:27:19 crc kubenswrapper[4755]: I1124 01:27:19.658499 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"0b8f9a57-22fa-4115-942f-e6f7343a78e4","Type":"ContainerStarted","Data":"3fa39dde851ac429e738479029d032cbfdcd6bb31a5ca2b7d55de8280ea008e1"} Nov 24 01:27:19 crc kubenswrapper[4755]: I1124 01:27:19.661371 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-mn9d6" event={"ID":"1a3ac5d9-abf5-430d-9d26-e6a308a6b1d7","Type":"ContainerDied","Data":"7bfbeab98f928a767a345d593c8c600a813216347702672c5f988eb69eb74217"} Nov 24 01:27:19 crc kubenswrapper[4755]: I1124 01:27:19.661438 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-mn9d6" Nov 24 01:27:19 crc kubenswrapper[4755]: I1124 01:27:19.673254 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-666b6646f7-rw7mx" podStartSLOduration=2.714874795 podStartE2EDuration="16.673237292s" podCreationTimestamp="2025-11-24 01:27:03 +0000 UTC" firstStartedPulling="2025-11-24 01:27:04.225481006 +0000 UTC m=+848.911546507" lastFinishedPulling="2025-11-24 01:27:18.183843493 +0000 UTC m=+862.869909004" observedRunningTime="2025-11-24 01:27:19.670018121 +0000 UTC m=+864.356083642" watchObservedRunningTime="2025-11-24 01:27:19.673237292 +0000 UTC m=+864.359302793" Nov 24 01:27:19 crc kubenswrapper[4755]: I1124 01:27:19.678830 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"22215216-efac-4810-90f1-4d42ccc6399c","Type":"ContainerStarted","Data":"fdce2f9300e570539f840b7465625762aa95102f30e2103b56f1e1ba3d527748"} Nov 24 01:27:19 crc kubenswrapper[4755]: I1124 01:27:19.680679 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"fa97a0b2-add8-4532-ab38-d726de9f0a60","Type":"ContainerStarted","Data":"8f875e46ec7064337a9ab581e20c3ce8261c8fe85620f0eb863116b20de9a481"} Nov 24 01:27:19 crc kubenswrapper[4755]: I1124 01:27:19.682663 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-lh52s" event={"ID":"88fbee01-1663-4ae9-9776-84ad70bfc066","Type":"ContainerStarted","Data":"e2ac36003be7551d8a1c3b8d27f0442a93be825c126c3d7bace557c46a2f15a9"} Nov 24 01:27:19 crc kubenswrapper[4755]: I1124 01:27:19.683362 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57d769cc4f-lh52s" Nov 24 01:27:19 crc kubenswrapper[4755]: I1124 01:27:19.726693 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-nmftn"] Nov 24 01:27:19 crc kubenswrapper[4755]: I1124 01:27:19.731454 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-nmftn"] Nov 24 01:27:19 crc kubenswrapper[4755]: I1124 01:27:19.748340 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57d769cc4f-lh52s" podStartSLOduration=5.967214881 podStartE2EDuration="16.748322785s" podCreationTimestamp="2025-11-24 01:27:03 +0000 UTC" firstStartedPulling="2025-11-24 01:27:07.430940293 +0000 UTC m=+852.117005814" lastFinishedPulling="2025-11-24 01:27:18.212048217 +0000 UTC m=+862.898113718" observedRunningTime="2025-11-24 01:27:19.72221623 +0000 UTC m=+864.408281741" watchObservedRunningTime="2025-11-24 01:27:19.748322785 +0000 UTC m=+864.434388286" Nov 24 01:27:19 crc kubenswrapper[4755]: I1124 01:27:19.759986 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-mn9d6"] Nov 24 01:27:19 crc kubenswrapper[4755]: I1124 01:27:19.765058 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-mn9d6"] Nov 24 01:27:20 crc kubenswrapper[4755]: I1124 01:27:20.007741 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a3ac5d9-abf5-430d-9d26-e6a308a6b1d7" path="/var/lib/kubelet/pods/1a3ac5d9-abf5-430d-9d26-e6a308a6b1d7/volumes" Nov 24 01:27:20 crc kubenswrapper[4755]: I1124 01:27:20.008312 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="61197ade-de23-4903-bb62-438e07080dbd" 
path="/var/lib/kubelet/pods/61197ade-de23-4903-bb62-438e07080dbd/volumes" Nov 24 01:27:20 crc kubenswrapper[4755]: I1124 01:27:20.400513 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Nov 24 01:27:20 crc kubenswrapper[4755]: I1124 01:27:20.502074 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-khjj5"] Nov 24 01:27:20 crc kubenswrapper[4755]: W1124 01:27:20.918625 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1fdb8eaf_1302_4fff_a38f_673a89890e64.slice/crio-41960ec9349e5339919eb729f621853210373142fab21ceb587bb420d433cbf9 WatchSource:0}: Error finding container 41960ec9349e5339919eb729f621853210373142fab21ceb587bb420d433cbf9: Status 404 returned error can't find the container with id 41960ec9349e5339919eb729f621853210373142fab21ceb587bb420d433cbf9 Nov 24 01:27:21 crc kubenswrapper[4755]: I1124 01:27:21.700438 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"1fdb8eaf-1302-4fff-a38f-673a89890e64","Type":"ContainerStarted","Data":"41960ec9349e5339919eb729f621853210373142fab21ceb587bb420d433cbf9"} Nov 24 01:27:23 crc kubenswrapper[4755]: I1124 01:27:23.719768 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-666b6646f7-rw7mx" Nov 24 01:27:23 crc kubenswrapper[4755]: I1124 01:27:23.995844 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-57d769cc4f-lh52s" Nov 24 01:27:24 crc kubenswrapper[4755]: I1124 01:27:24.176551 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-rw7mx"] Nov 24 01:27:24 crc kubenswrapper[4755]: I1124 01:27:24.719352 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-666b6646f7-rw7mx" podUID="a6a2b41a-a90f-4468-9df6-d81d680baa4b" containerName="dnsmasq-dns" containerID="cri-o://24cb744c328dac81fa43959f08f99d41986c385e5e7cd7ae15137f70a798ec6e" gracePeriod=10 Nov 24 01:27:25 crc kubenswrapper[4755]: W1124 01:27:25.455823 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3e8faee1_2ae1_4f03_9379_d35e533f222d.slice/crio-4e61a0450dc1aff99f402b4627676417817b220aea3b7dac77d17b90a0c0adae WatchSource:0}: Error finding container 4e61a0450dc1aff99f402b4627676417817b220aea3b7dac77d17b90a0c0adae: Status 404 returned error can't find the container with id 4e61a0450dc1aff99f402b4627676417817b220aea3b7dac77d17b90a0c0adae Nov 24 01:27:25 crc kubenswrapper[4755]: I1124 01:27:25.727803 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-khjj5" event={"ID":"3e8faee1-2ae1-4f03-9379-d35e533f222d","Type":"ContainerStarted","Data":"4e61a0450dc1aff99f402b4627676417817b220aea3b7dac77d17b90a0c0adae"} Nov 24 01:27:25 crc kubenswrapper[4755]: I1124 01:27:25.730580 4755 generic.go:334] "Generic (PLEG): container finished" podID="a6a2b41a-a90f-4468-9df6-d81d680baa4b" containerID="24cb744c328dac81fa43959f08f99d41986c385e5e7cd7ae15137f70a798ec6e" exitCode=0 Nov 24 01:27:25 crc kubenswrapper[4755]: I1124 01:27:25.730643 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-rw7mx" event={"ID":"a6a2b41a-a90f-4468-9df6-d81d680baa4b","Type":"ContainerDied","Data":"24cb744c328dac81fa43959f08f99d41986c385e5e7cd7ae15137f70a798ec6e"} Nov 24 01:27:28 
crc kubenswrapper[4755]: I1124 01:27:28.909940 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-rw7mx" Nov 24 01:27:29 crc kubenswrapper[4755]: I1124 01:27:29.031429 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a6a2b41a-a90f-4468-9df6-d81d680baa4b-dns-svc\") pod \"a6a2b41a-a90f-4468-9df6-d81d680baa4b\" (UID: \"a6a2b41a-a90f-4468-9df6-d81d680baa4b\") " Nov 24 01:27:29 crc kubenswrapper[4755]: I1124 01:27:29.031536 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6a2b41a-a90f-4468-9df6-d81d680baa4b-config\") pod \"a6a2b41a-a90f-4468-9df6-d81d680baa4b\" (UID: \"a6a2b41a-a90f-4468-9df6-d81d680baa4b\") " Nov 24 01:27:29 crc kubenswrapper[4755]: I1124 01:27:29.031652 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9tfbg\" (UniqueName: \"kubernetes.io/projected/a6a2b41a-a90f-4468-9df6-d81d680baa4b-kube-api-access-9tfbg\") pod \"a6a2b41a-a90f-4468-9df6-d81d680baa4b\" (UID: \"a6a2b41a-a90f-4468-9df6-d81d680baa4b\") " Nov 24 01:27:29 crc kubenswrapper[4755]: I1124 01:27:29.037415 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6a2b41a-a90f-4468-9df6-d81d680baa4b-kube-api-access-9tfbg" (OuterVolumeSpecName: "kube-api-access-9tfbg") pod "a6a2b41a-a90f-4468-9df6-d81d680baa4b" (UID: "a6a2b41a-a90f-4468-9df6-d81d680baa4b"). InnerVolumeSpecName "kube-api-access-9tfbg". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:27:29 crc kubenswrapper[4755]: I1124 01:27:29.079234 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a6a2b41a-a90f-4468-9df6-d81d680baa4b-config" (OuterVolumeSpecName: "config") pod "a6a2b41a-a90f-4468-9df6-d81d680baa4b" (UID: "a6a2b41a-a90f-4468-9df6-d81d680baa4b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:27:29 crc kubenswrapper[4755]: I1124 01:27:29.081374 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a6a2b41a-a90f-4468-9df6-d81d680baa4b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a6a2b41a-a90f-4468-9df6-d81d680baa4b" (UID: "a6a2b41a-a90f-4468-9df6-d81d680baa4b"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:27:29 crc kubenswrapper[4755]: I1124 01:27:29.133435 4755 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a6a2b41a-a90f-4468-9df6-d81d680baa4b-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 24 01:27:29 crc kubenswrapper[4755]: I1124 01:27:29.133465 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6a2b41a-a90f-4468-9df6-d81d680baa4b-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:27:29 crc kubenswrapper[4755]: I1124 01:27:29.133475 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9tfbg\" (UniqueName: \"kubernetes.io/projected/a6a2b41a-a90f-4468-9df6-d81d680baa4b-kube-api-access-9tfbg\") on node \"crc\" DevicePath \"\"" Nov 24 01:27:29 crc kubenswrapper[4755]: I1124 01:27:29.768329 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-rw7mx" event={"ID":"a6a2b41a-a90f-4468-9df6-d81d680baa4b","Type":"ContainerDied","Data":"dd53e334b4055785f8375d111adf52d1c4ca8ae1927e87bfdf2f27a9ff23ead7"} Nov 24 01:27:29 crc kubenswrapper[4755]: I1124 01:27:29.768390 4755 scope.go:117] "RemoveContainer" containerID="24cb744c328dac81fa43959f08f99d41986c385e5e7cd7ae15137f70a798ec6e" Nov 24 01:27:29 crc kubenswrapper[4755]: I1124 01:27:29.768401 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-rw7mx" Nov 24 01:27:29 crc kubenswrapper[4755]: I1124 01:27:29.811631 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-rw7mx"] Nov 24 01:27:29 crc kubenswrapper[4755]: I1124 01:27:29.818298 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-rw7mx"] Nov 24 01:27:29 crc kubenswrapper[4755]: I1124 01:27:29.821070 4755 scope.go:117] "RemoveContainer" containerID="66ae556633beb5cae195bfe7128ba7333e7988ea68d3eb51368e4a2fc5c38b16" Nov 24 01:27:30 crc kubenswrapper[4755]: I1124 01:27:30.006067 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a6a2b41a-a90f-4468-9df6-d81d680baa4b" path="/var/lib/kubelet/pods/a6a2b41a-a90f-4468-9df6-d81d680baa4b/volumes" Nov 24 01:27:30 crc kubenswrapper[4755]: I1124 01:27:30.774902 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-khjj5" event={"ID":"3e8faee1-2ae1-4f03-9379-d35e533f222d","Type":"ContainerStarted","Data":"d757082ac824df08d7c7202eb0570f2bb77937f229224ee227aba626e8d86863"} Nov 24 01:27:30 crc kubenswrapper[4755]: I1124 01:27:30.777018 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-7tzgl" event={"ID":"06d5bee3-77f6-4f08-bfc6-6c9cf6f0bdc0","Type":"ContainerStarted","Data":"243f8514d291205f5189f816a13581431dc688756f3d6c51c221722d61f1c7c5"} Nov 24 01:27:30 crc kubenswrapper[4755]: I1124 01:27:30.777642 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-7tzgl" Nov 24 01:27:30 crc kubenswrapper[4755]: I1124 01:27:30.779328 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"9b8b513e-db84-49e7-88e5-b023b20bd604","Type":"ContainerStarted","Data":"922cb09e6ccebf85d10b5fa3fbfb773e232cdba6b950e14a91dbd5fc765da6e6"} Nov 24 01:27:30 crc kubenswrapper[4755]: I1124 01:27:30.780872 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" 
event={"ID":"0b8f9a57-22fa-4115-942f-e6f7343a78e4","Type":"ContainerStarted","Data":"0e112c6742f6c327b514aa21a93329a5160c5710e62ae08928942493c8c0f3d6"} Nov 24 01:27:30 crc kubenswrapper[4755]: I1124 01:27:30.782764 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"1fdb8eaf-1302-4fff-a38f-673a89890e64","Type":"ContainerStarted","Data":"1e50b2be1d86ac2f22b159126f3b40092318f9425bbb5481444b626542747c2f"} Nov 24 01:27:30 crc kubenswrapper[4755]: I1124 01:27:30.784154 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"907891d0-296d-4b4b-a3f3-867979467a98","Type":"ContainerStarted","Data":"4ac094ba5ae195d237e03177ca3743fa1216f2187e4274d4ceeae0a845aba5d0"} Nov 24 01:27:30 crc kubenswrapper[4755]: I1124 01:27:30.784500 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Nov 24 01:27:30 crc kubenswrapper[4755]: I1124 01:27:30.786098 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"22215216-efac-4810-90f1-4d42ccc6399c","Type":"ContainerStarted","Data":"6c54259e39f0345b428ad2641312e58875e0ea10c9eafaa58677d037f2d76345"} Nov 24 01:27:30 crc kubenswrapper[4755]: I1124 01:27:30.787743 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"fa97a0b2-add8-4532-ab38-d726de9f0a60","Type":"ContainerStarted","Data":"be307f1527197f2e4d79db370387ab5cfc115ff696e8b9fb5c96401acd08581d"} Nov 24 01:27:30 crc kubenswrapper[4755]: I1124 01:27:30.788110 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Nov 24 01:27:30 crc kubenswrapper[4755]: I1124 01:27:30.827433 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=12.614941769 podStartE2EDuration="22.827414283s" podCreationTimestamp="2025-11-24 01:27:08 +0000 UTC" firstStartedPulling="2025-11-24 01:27:19.120955472 +0000 UTC m=+863.807020993" lastFinishedPulling="2025-11-24 01:27:29.333427996 +0000 UTC m=+874.019493507" observedRunningTime="2025-11-24 01:27:30.813993156 +0000 UTC m=+875.500058667" watchObservedRunningTime="2025-11-24 01:27:30.827414283 +0000 UTC m=+875.513479784" Nov 24 01:27:30 crc kubenswrapper[4755]: I1124 01:27:30.854583 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-7tzgl" podStartSLOduration=7.176533523 podStartE2EDuration="17.854565277s" podCreationTimestamp="2025-11-24 01:27:13 +0000 UTC" firstStartedPulling="2025-11-24 01:27:19.141958413 +0000 UTC m=+863.828023914" lastFinishedPulling="2025-11-24 01:27:29.819990157 +0000 UTC m=+874.506055668" observedRunningTime="2025-11-24 01:27:30.852508799 +0000 UTC m=+875.538574321" watchObservedRunningTime="2025-11-24 01:27:30.854565277 +0000 UTC m=+875.540630788" Nov 24 01:27:30 crc kubenswrapper[4755]: I1124 01:27:30.856227 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=10.69159692 podStartE2EDuration="21.856212904s" podCreationTimestamp="2025-11-24 01:27:09 +0000 UTC" firstStartedPulling="2025-11-24 01:27:19.109220422 +0000 UTC m=+863.795285923" lastFinishedPulling="2025-11-24 01:27:30.273836396 +0000 UTC m=+874.959901907" observedRunningTime="2025-11-24 01:27:30.833238677 +0000 UTC m=+875.519304188" watchObservedRunningTime="2025-11-24 01:27:30.856212904 +0000 UTC m=+875.542278425" Nov 24 01:27:31 crc 
kubenswrapper[4755]: I1124 01:27:31.799438 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"20a66507-c5f4-43d2-a99b-18daaffea30f","Type":"ContainerStarted","Data":"01167809120deee76264fb76f66c01e755d3143e759623ecc51641de11bb6563"} Nov 24 01:27:31 crc kubenswrapper[4755]: I1124 01:27:31.802321 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed","Type":"ContainerStarted","Data":"5158b16b766a448d141bf8a6f42a136b467e9d76431b7b8f0bf4a3de684c35ca"} Nov 24 01:27:31 crc kubenswrapper[4755]: I1124 01:27:31.804420 4755 generic.go:334] "Generic (PLEG): container finished" podID="3e8faee1-2ae1-4f03-9379-d35e533f222d" containerID="d757082ac824df08d7c7202eb0570f2bb77937f229224ee227aba626e8d86863" exitCode=0 Nov 24 01:27:31 crc kubenswrapper[4755]: I1124 01:27:31.804620 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-khjj5" event={"ID":"3e8faee1-2ae1-4f03-9379-d35e533f222d","Type":"ContainerDied","Data":"d757082ac824df08d7c7202eb0570f2bb77937f229224ee227aba626e8d86863"} Nov 24 01:27:32 crc kubenswrapper[4755]: I1124 01:27:32.821132 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-khjj5" event={"ID":"3e8faee1-2ae1-4f03-9379-d35e533f222d","Type":"ContainerStarted","Data":"2e4cec75c0549f9721153abfdab78b924bfd4cb85bc536416d7f072571317d79"} Nov 24 01:27:32 crc kubenswrapper[4755]: I1124 01:27:32.821464 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-khjj5" event={"ID":"3e8faee1-2ae1-4f03-9379-d35e533f222d","Type":"ContainerStarted","Data":"d208ae6651698895a710aebf3678acd60493ddabaf2decc4614ac254307de5e2"} Nov 24 01:27:32 crc kubenswrapper[4755]: I1124 01:27:32.821707 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-khjj5" Nov 24 01:27:32 crc kubenswrapper[4755]: I1124 01:27:32.821740 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-khjj5" Nov 24 01:27:33 crc kubenswrapper[4755]: I1124 01:27:33.719488 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-666b6646f7-rw7mx" podUID="a6a2b41a-a90f-4468-9df6-d81d680baa4b" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.98:5353: i/o timeout" Nov 24 01:27:34 crc kubenswrapper[4755]: I1124 01:27:34.840088 4755 generic.go:334] "Generic (PLEG): container finished" podID="22215216-efac-4810-90f1-4d42ccc6399c" containerID="6c54259e39f0345b428ad2641312e58875e0ea10c9eafaa58677d037f2d76345" exitCode=0 Nov 24 01:27:34 crc kubenswrapper[4755]: I1124 01:27:34.840197 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"22215216-efac-4810-90f1-4d42ccc6399c","Type":"ContainerDied","Data":"6c54259e39f0345b428ad2641312e58875e0ea10c9eafaa58677d037f2d76345"} Nov 24 01:27:34 crc kubenswrapper[4755]: I1124 01:27:34.843438 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"9b8b513e-db84-49e7-88e5-b023b20bd604","Type":"ContainerStarted","Data":"8e423940f902f0e05ceaa2209a5702644ab08bfe00babdd76395aec89f139e77"} Nov 24 01:27:34 crc kubenswrapper[4755]: I1124 01:27:34.848135 4755 generic.go:334] "Generic (PLEG): container finished" podID="0b8f9a57-22fa-4115-942f-e6f7343a78e4" containerID="0e112c6742f6c327b514aa21a93329a5160c5710e62ae08928942493c8c0f3d6" exitCode=0 
Nov 24 01:27:34 crc kubenswrapper[4755]: I1124 01:27:34.848178 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"0b8f9a57-22fa-4115-942f-e6f7343a78e4","Type":"ContainerDied","Data":"0e112c6742f6c327b514aa21a93329a5160c5710e62ae08928942493c8c0f3d6"} Nov 24 01:27:34 crc kubenswrapper[4755]: I1124 01:27:34.850461 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"1fdb8eaf-1302-4fff-a38f-673a89890e64","Type":"ContainerStarted","Data":"b7f1c362b72a1423d721de71a4e8311aceef4046602f9fd5212c536a449651cd"} Nov 24 01:27:34 crc kubenswrapper[4755]: I1124 01:27:34.879511 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-khjj5" podStartSLOduration=17.518032828 podStartE2EDuration="21.879489103s" podCreationTimestamp="2025-11-24 01:27:13 +0000 UTC" firstStartedPulling="2025-11-24 01:27:25.457632836 +0000 UTC m=+870.143698337" lastFinishedPulling="2025-11-24 01:27:29.819089111 +0000 UTC m=+874.505154612" observedRunningTime="2025-11-24 01:27:32.849974426 +0000 UTC m=+877.536039937" watchObservedRunningTime="2025-11-24 01:27:34.879489103 +0000 UTC m=+879.565554614" Nov 24 01:27:34 crc kubenswrapper[4755]: I1124 01:27:34.896443 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=5.538562839 podStartE2EDuration="18.896420389s" podCreationTimestamp="2025-11-24 01:27:16 +0000 UTC" firstStartedPulling="2025-11-24 01:27:20.92057612 +0000 UTC m=+865.606641621" lastFinishedPulling="2025-11-24 01:27:34.27843367 +0000 UTC m=+878.964499171" observedRunningTime="2025-11-24 01:27:34.896292175 +0000 UTC m=+879.582357806" watchObservedRunningTime="2025-11-24 01:27:34.896420389 +0000 UTC m=+879.582485900" Nov 24 01:27:34 crc kubenswrapper[4755]: I1124 01:27:34.954044 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=8.119820839 podStartE2EDuration="22.95402418s" podCreationTimestamp="2025-11-24 01:27:12 +0000 UTC" firstStartedPulling="2025-11-24 01:27:19.44819635 +0000 UTC m=+864.134261851" lastFinishedPulling="2025-11-24 01:27:34.282399691 +0000 UTC m=+878.968465192" observedRunningTime="2025-11-24 01:27:34.948810613 +0000 UTC m=+879.634876134" watchObservedRunningTime="2025-11-24 01:27:34.95402418 +0000 UTC m=+879.640089681" Nov 24 01:27:35 crc kubenswrapper[4755]: I1124 01:27:35.202053 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Nov 24 01:27:35 crc kubenswrapper[4755]: I1124 01:27:35.255582 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Nov 24 01:27:35 crc kubenswrapper[4755]: I1124 01:27:35.563156 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Nov 24 01:27:35 crc kubenswrapper[4755]: I1124 01:27:35.626343 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Nov 24 01:27:35 crc kubenswrapper[4755]: I1124 01:27:35.859330 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"0b8f9a57-22fa-4115-942f-e6f7343a78e4","Type":"ContainerStarted","Data":"c25e87301f76f79d9f0a76799e2df8a9b348ab8fecc62222de11f1fea5c8843a"} Nov 24 01:27:35 crc kubenswrapper[4755]: I1124 01:27:35.861638 4755 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack/openstack-galera-0" event={"ID":"22215216-efac-4810-90f1-4d42ccc6399c","Type":"ContainerStarted","Data":"fa704eb1b0abede6dfe649ea9fdf3da697923d7328371efbe83200718bd84bc3"} Nov 24 01:27:35 crc kubenswrapper[4755]: I1124 01:27:35.862129 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Nov 24 01:27:35 crc kubenswrapper[4755]: I1124 01:27:35.862171 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Nov 24 01:27:35 crc kubenswrapper[4755]: I1124 01:27:35.893487 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=19.188935456 podStartE2EDuration="29.893466515s" podCreationTimestamp="2025-11-24 01:27:06 +0000 UTC" firstStartedPulling="2025-11-24 01:27:19.115585061 +0000 UTC m=+863.801650552" lastFinishedPulling="2025-11-24 01:27:29.82011609 +0000 UTC m=+874.506181611" observedRunningTime="2025-11-24 01:27:35.889594716 +0000 UTC m=+880.575660227" watchObservedRunningTime="2025-11-24 01:27:35.893466515 +0000 UTC m=+880.579532016" Nov 24 01:27:35 crc kubenswrapper[4755]: I1124 01:27:35.901710 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Nov 24 01:27:35 crc kubenswrapper[4755]: I1124 01:27:35.906358 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Nov 24 01:27:35 crc kubenswrapper[4755]: I1124 01:27:35.913990 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=20.235108275 podStartE2EDuration="30.913974142s" podCreationTimestamp="2025-11-24 01:27:05 +0000 UTC" firstStartedPulling="2025-11-24 01:27:18.761399555 +0000 UTC m=+863.447465056" lastFinishedPulling="2025-11-24 01:27:29.440265422 +0000 UTC m=+874.126330923" observedRunningTime="2025-11-24 01:27:35.910960627 +0000 UTC m=+880.597026138" watchObservedRunningTime="2025-11-24 01:27:35.913974142 +0000 UTC m=+880.600039653" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.105553 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-7qk89"] Nov 24 01:27:36 crc kubenswrapper[4755]: E1124 01:27:36.106099 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6a2b41a-a90f-4468-9df6-d81d680baa4b" containerName="dnsmasq-dns" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.106172 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6a2b41a-a90f-4468-9df6-d81d680baa4b" containerName="dnsmasq-dns" Nov 24 01:27:36 crc kubenswrapper[4755]: E1124 01:27:36.106243 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6a2b41a-a90f-4468-9df6-d81d680baa4b" containerName="init" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.106302 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6a2b41a-a90f-4468-9df6-d81d680baa4b" containerName="init" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.106484 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6a2b41a-a90f-4468-9df6-d81d680baa4b" containerName="dnsmasq-dns" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.107303 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7f896c8c65-7qk89" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.109224 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.122374 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-7qk89"] Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.155907 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rx2j2\" (UniqueName: \"kubernetes.io/projected/7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58-kube-api-access-rx2j2\") pod \"dnsmasq-dns-7f896c8c65-7qk89\" (UID: \"7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58\") " pod="openstack/dnsmasq-dns-7f896c8c65-7qk89" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.155993 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58-config\") pod \"dnsmasq-dns-7f896c8c65-7qk89\" (UID: \"7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58\") " pod="openstack/dnsmasq-dns-7f896c8c65-7qk89" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.156080 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58-dns-svc\") pod \"dnsmasq-dns-7f896c8c65-7qk89\" (UID: \"7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58\") " pod="openstack/dnsmasq-dns-7f896c8c65-7qk89" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.156265 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58-ovsdbserver-sb\") pod \"dnsmasq-dns-7f896c8c65-7qk89\" (UID: \"7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58\") " pod="openstack/dnsmasq-dns-7f896c8c65-7qk89" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.257157 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58-config\") pod \"dnsmasq-dns-7f896c8c65-7qk89\" (UID: \"7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58\") " pod="openstack/dnsmasq-dns-7f896c8c65-7qk89" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.257190 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58-dns-svc\") pod \"dnsmasq-dns-7f896c8c65-7qk89\" (UID: \"7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58\") " pod="openstack/dnsmasq-dns-7f896c8c65-7qk89" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.257245 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58-ovsdbserver-sb\") pod \"dnsmasq-dns-7f896c8c65-7qk89\" (UID: \"7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58\") " pod="openstack/dnsmasq-dns-7f896c8c65-7qk89" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.257301 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rx2j2\" (UniqueName: \"kubernetes.io/projected/7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58-kube-api-access-rx2j2\") pod \"dnsmasq-dns-7f896c8c65-7qk89\" (UID: \"7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58\") " pod="openstack/dnsmasq-dns-7f896c8c65-7qk89" 
Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.258331 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58-dns-svc\") pod \"dnsmasq-dns-7f896c8c65-7qk89\" (UID: \"7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58\") " pod="openstack/dnsmasq-dns-7f896c8c65-7qk89" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.258409 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58-ovsdbserver-sb\") pod \"dnsmasq-dns-7f896c8c65-7qk89\" (UID: \"7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58\") " pod="openstack/dnsmasq-dns-7f896c8c65-7qk89" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.258776 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58-config\") pod \"dnsmasq-dns-7f896c8c65-7qk89\" (UID: \"7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58\") " pod="openstack/dnsmasq-dns-7f896c8c65-7qk89" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.269985 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-8kczp"] Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.271377 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-8kczp" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.276127 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.284324 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rx2j2\" (UniqueName: \"kubernetes.io/projected/7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58-kube-api-access-rx2j2\") pod \"dnsmasq-dns-7f896c8c65-7qk89\" (UID: \"7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58\") " pod="openstack/dnsmasq-dns-7f896c8c65-7qk89" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.297934 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-8kczp"] Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.359537 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/509f3153-a59d-4614-a753-8cd8df81734c-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-8kczp\" (UID: \"509f3153-a59d-4614-a753-8cd8df81734c\") " pod="openstack/ovn-controller-metrics-8kczp" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.359590 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/509f3153-a59d-4614-a753-8cd8df81734c-combined-ca-bundle\") pod \"ovn-controller-metrics-8kczp\" (UID: \"509f3153-a59d-4614-a753-8cd8df81734c\") " pod="openstack/ovn-controller-metrics-8kczp" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.359649 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/509f3153-a59d-4614-a753-8cd8df81734c-config\") pod \"ovn-controller-metrics-8kczp\" (UID: \"509f3153-a59d-4614-a753-8cd8df81734c\") " pod="openstack/ovn-controller-metrics-8kczp" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.359868 4755 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rlkvp\" (UniqueName: \"kubernetes.io/projected/509f3153-a59d-4614-a753-8cd8df81734c-kube-api-access-rlkvp\") pod \"ovn-controller-metrics-8kczp\" (UID: \"509f3153-a59d-4614-a753-8cd8df81734c\") " pod="openstack/ovn-controller-metrics-8kczp" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.359893 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/509f3153-a59d-4614-a753-8cd8df81734c-ovn-rundir\") pod \"ovn-controller-metrics-8kczp\" (UID: \"509f3153-a59d-4614-a753-8cd8df81734c\") " pod="openstack/ovn-controller-metrics-8kczp" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.359930 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/509f3153-a59d-4614-a753-8cd8df81734c-ovs-rundir\") pod \"ovn-controller-metrics-8kczp\" (UID: \"509f3153-a59d-4614-a753-8cd8df81734c\") " pod="openstack/ovn-controller-metrics-8kczp" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.426618 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f896c8c65-7qk89" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.459848 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-7qk89"] Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.467012 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/509f3153-a59d-4614-a753-8cd8df81734c-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-8kczp\" (UID: \"509f3153-a59d-4614-a753-8cd8df81734c\") " pod="openstack/ovn-controller-metrics-8kczp" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.467070 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/509f3153-a59d-4614-a753-8cd8df81734c-combined-ca-bundle\") pod \"ovn-controller-metrics-8kczp\" (UID: \"509f3153-a59d-4614-a753-8cd8df81734c\") " pod="openstack/ovn-controller-metrics-8kczp" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.467136 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/509f3153-a59d-4614-a753-8cd8df81734c-config\") pod \"ovn-controller-metrics-8kczp\" (UID: \"509f3153-a59d-4614-a753-8cd8df81734c\") " pod="openstack/ovn-controller-metrics-8kczp" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.467183 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rlkvp\" (UniqueName: \"kubernetes.io/projected/509f3153-a59d-4614-a753-8cd8df81734c-kube-api-access-rlkvp\") pod \"ovn-controller-metrics-8kczp\" (UID: \"509f3153-a59d-4614-a753-8cd8df81734c\") " pod="openstack/ovn-controller-metrics-8kczp" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.467214 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/509f3153-a59d-4614-a753-8cd8df81734c-ovn-rundir\") pod \"ovn-controller-metrics-8kczp\" (UID: \"509f3153-a59d-4614-a753-8cd8df81734c\") " pod="openstack/ovn-controller-metrics-8kczp" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.467284 4755 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/509f3153-a59d-4614-a753-8cd8df81734c-ovs-rundir\") pod \"ovn-controller-metrics-8kczp\" (UID: \"509f3153-a59d-4614-a753-8cd8df81734c\") " pod="openstack/ovn-controller-metrics-8kczp" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.467577 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-znqch"] Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.467896 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/509f3153-a59d-4614-a753-8cd8df81734c-ovs-rundir\") pod \"ovn-controller-metrics-8kczp\" (UID: \"509f3153-a59d-4614-a753-8cd8df81734c\") " pod="openstack/ovn-controller-metrics-8kczp" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.468810 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-znqch" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.469119 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/509f3153-a59d-4614-a753-8cd8df81734c-ovn-rundir\") pod \"ovn-controller-metrics-8kczp\" (UID: \"509f3153-a59d-4614-a753-8cd8df81734c\") " pod="openstack/ovn-controller-metrics-8kczp" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.469386 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/509f3153-a59d-4614-a753-8cd8df81734c-config\") pod \"ovn-controller-metrics-8kczp\" (UID: \"509f3153-a59d-4614-a753-8cd8df81734c\") " pod="openstack/ovn-controller-metrics-8kczp" Nov 24 01:27:36 crc kubenswrapper[4755]: W1124 01:27:36.477980 4755 reflector.go:561] object-"openstack"/"ovsdbserver-nb": failed to list *v1.ConfigMap: configmaps "ovsdbserver-nb" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openstack": no relationship found between node 'crc' and this object Nov 24 01:27:36 crc kubenswrapper[4755]: E1124 01:27:36.478038 4755 reflector.go:158] "Unhandled Error" err="object-\"openstack\"/\"ovsdbserver-nb\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"ovsdbserver-nb\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openstack\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.490591 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.492372 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.497326 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rlkvp\" (UniqueName: \"kubernetes.io/projected/509f3153-a59d-4614-a753-8cd8df81734c-kube-api-access-rlkvp\") pod \"ovn-controller-metrics-8kczp\" (UID: \"509f3153-a59d-4614-a753-8cd8df81734c\") " pod="openstack/ovn-controller-metrics-8kczp" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.510146 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.510242 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/509f3153-a59d-4614-a753-8cd8df81734c-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-8kczp\" (UID: \"509f3153-a59d-4614-a753-8cd8df81734c\") " pod="openstack/ovn-controller-metrics-8kczp" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.510471 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/509f3153-a59d-4614-a753-8cd8df81734c-combined-ca-bundle\") pod \"ovn-controller-metrics-8kczp\" (UID: \"509f3153-a59d-4614-a753-8cd8df81734c\") " pod="openstack/ovn-controller-metrics-8kczp" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.510716 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-4t8tr" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.510970 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.515340 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.517294 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-znqch"] Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.544682 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.578870 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c5a9efb-a4cf-4485-b3bd-972318209141-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"7c5a9efb-a4cf-4485-b3bd-972318209141\") " pod="openstack/ovn-northd-0" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.578921 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/171cc508-1caf-4228-8e5b-6f34017960e2-config\") pod \"dnsmasq-dns-86db49b7ff-znqch\" (UID: \"171cc508-1caf-4228-8e5b-6f34017960e2\") " pod="openstack/dnsmasq-dns-86db49b7ff-znqch" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.578960 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/7c5a9efb-a4cf-4485-b3bd-972318209141-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"7c5a9efb-a4cf-4485-b3bd-972318209141\") " pod="openstack/ovn-northd-0" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.578979 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/171cc508-1caf-4228-8e5b-6f34017960e2-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-znqch\" (UID: \"171cc508-1caf-4228-8e5b-6f34017960e2\") " pod="openstack/dnsmasq-dns-86db49b7ff-znqch" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.578996 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v2bnw\" (UniqueName: \"kubernetes.io/projected/7c5a9efb-a4cf-4485-b3bd-972318209141-kube-api-access-v2bnw\") pod \"ovn-northd-0\" (UID: \"7c5a9efb-a4cf-4485-b3bd-972318209141\") " pod="openstack/ovn-northd-0" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.579014 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7c5a9efb-a4cf-4485-b3bd-972318209141-scripts\") pod \"ovn-northd-0\" (UID: \"7c5a9efb-a4cf-4485-b3bd-972318209141\") " pod="openstack/ovn-northd-0" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.579097 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7c5a9efb-a4cf-4485-b3bd-972318209141-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"7c5a9efb-a4cf-4485-b3bd-972318209141\") " pod="openstack/ovn-northd-0" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.579159 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/171cc508-1caf-4228-8e5b-6f34017960e2-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-znqch\" (UID: \"171cc508-1caf-4228-8e5b-6f34017960e2\") " pod="openstack/dnsmasq-dns-86db49b7ff-znqch" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.579201 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c5a9efb-a4cf-4485-b3bd-972318209141-config\") pod \"ovn-northd-0\" (UID: \"7c5a9efb-a4cf-4485-b3bd-972318209141\") " pod="openstack/ovn-northd-0" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.579288 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7c5a9efb-a4cf-4485-b3bd-972318209141-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"7c5a9efb-a4cf-4485-b3bd-972318209141\") " pod="openstack/ovn-northd-0" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.579363 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wcdsl\" (UniqueName: \"kubernetes.io/projected/171cc508-1caf-4228-8e5b-6f34017960e2-kube-api-access-wcdsl\") pod \"dnsmasq-dns-86db49b7ff-znqch\" (UID: \"171cc508-1caf-4228-8e5b-6f34017960e2\") " pod="openstack/dnsmasq-dns-86db49b7ff-znqch" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.579450 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/171cc508-1caf-4228-8e5b-6f34017960e2-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-znqch\" (UID: \"171cc508-1caf-4228-8e5b-6f34017960e2\") " pod="openstack/dnsmasq-dns-86db49b7ff-znqch" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.629006 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-8kczp" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.681294 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/171cc508-1caf-4228-8e5b-6f34017960e2-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-znqch\" (UID: \"171cc508-1caf-4228-8e5b-6f34017960e2\") " pod="openstack/dnsmasq-dns-86db49b7ff-znqch" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.681369 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c5a9efb-a4cf-4485-b3bd-972318209141-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"7c5a9efb-a4cf-4485-b3bd-972318209141\") " pod="openstack/ovn-northd-0" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.681414 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/171cc508-1caf-4228-8e5b-6f34017960e2-config\") pod \"dnsmasq-dns-86db49b7ff-znqch\" (UID: \"171cc508-1caf-4228-8e5b-6f34017960e2\") " pod="openstack/dnsmasq-dns-86db49b7ff-znqch" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.681469 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/7c5a9efb-a4cf-4485-b3bd-972318209141-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"7c5a9efb-a4cf-4485-b3bd-972318209141\") " pod="openstack/ovn-northd-0" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.681498 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/171cc508-1caf-4228-8e5b-6f34017960e2-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-znqch\" (UID: \"171cc508-1caf-4228-8e5b-6f34017960e2\") " pod="openstack/dnsmasq-dns-86db49b7ff-znqch" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.681530 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v2bnw\" (UniqueName: \"kubernetes.io/projected/7c5a9efb-a4cf-4485-b3bd-972318209141-kube-api-access-v2bnw\") pod \"ovn-northd-0\" (UID: \"7c5a9efb-a4cf-4485-b3bd-972318209141\") " pod="openstack/ovn-northd-0" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.681557 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7c5a9efb-a4cf-4485-b3bd-972318209141-scripts\") pod \"ovn-northd-0\" (UID: \"7c5a9efb-a4cf-4485-b3bd-972318209141\") " pod="openstack/ovn-northd-0" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.681586 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7c5a9efb-a4cf-4485-b3bd-972318209141-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"7c5a9efb-a4cf-4485-b3bd-972318209141\") " pod="openstack/ovn-northd-0" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.681631 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/171cc508-1caf-4228-8e5b-6f34017960e2-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-znqch\" (UID: \"171cc508-1caf-4228-8e5b-6f34017960e2\") " pod="openstack/dnsmasq-dns-86db49b7ff-znqch" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.681661 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/7c5a9efb-a4cf-4485-b3bd-972318209141-config\") pod \"ovn-northd-0\" (UID: \"7c5a9efb-a4cf-4485-b3bd-972318209141\") " pod="openstack/ovn-northd-0" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.681710 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7c5a9efb-a4cf-4485-b3bd-972318209141-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"7c5a9efb-a4cf-4485-b3bd-972318209141\") " pod="openstack/ovn-northd-0" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.681760 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wcdsl\" (UniqueName: \"kubernetes.io/projected/171cc508-1caf-4228-8e5b-6f34017960e2-kube-api-access-wcdsl\") pod \"dnsmasq-dns-86db49b7ff-znqch\" (UID: \"171cc508-1caf-4228-8e5b-6f34017960e2\") " pod="openstack/dnsmasq-dns-86db49b7ff-znqch" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.682910 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/171cc508-1caf-4228-8e5b-6f34017960e2-config\") pod \"dnsmasq-dns-86db49b7ff-znqch\" (UID: \"171cc508-1caf-4228-8e5b-6f34017960e2\") " pod="openstack/dnsmasq-dns-86db49b7ff-znqch" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.683038 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7c5a9efb-a4cf-4485-b3bd-972318209141-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"7c5a9efb-a4cf-4485-b3bd-972318209141\") " pod="openstack/ovn-northd-0" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.683758 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/171cc508-1caf-4228-8e5b-6f34017960e2-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-znqch\" (UID: \"171cc508-1caf-4228-8e5b-6f34017960e2\") " pod="openstack/dnsmasq-dns-86db49b7ff-znqch" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.683769 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c5a9efb-a4cf-4485-b3bd-972318209141-config\") pod \"ovn-northd-0\" (UID: \"7c5a9efb-a4cf-4485-b3bd-972318209141\") " pod="openstack/ovn-northd-0" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.683837 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7c5a9efb-a4cf-4485-b3bd-972318209141-scripts\") pod \"ovn-northd-0\" (UID: \"7c5a9efb-a4cf-4485-b3bd-972318209141\") " pod="openstack/ovn-northd-0" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.685483 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/171cc508-1caf-4228-8e5b-6f34017960e2-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-znqch\" (UID: \"171cc508-1caf-4228-8e5b-6f34017960e2\") " pod="openstack/dnsmasq-dns-86db49b7ff-znqch" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.688444 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/7c5a9efb-a4cf-4485-b3bd-972318209141-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"7c5a9efb-a4cf-4485-b3bd-972318209141\") " pod="openstack/ovn-northd-0" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.689525 4755 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c5a9efb-a4cf-4485-b3bd-972318209141-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"7c5a9efb-a4cf-4485-b3bd-972318209141\") " pod="openstack/ovn-northd-0" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.696081 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7c5a9efb-a4cf-4485-b3bd-972318209141-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"7c5a9efb-a4cf-4485-b3bd-972318209141\") " pod="openstack/ovn-northd-0" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.698800 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wcdsl\" (UniqueName: \"kubernetes.io/projected/171cc508-1caf-4228-8e5b-6f34017960e2-kube-api-access-wcdsl\") pod \"dnsmasq-dns-86db49b7ff-znqch\" (UID: \"171cc508-1caf-4228-8e5b-6f34017960e2\") " pod="openstack/dnsmasq-dns-86db49b7ff-znqch" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.706433 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v2bnw\" (UniqueName: \"kubernetes.io/projected/7c5a9efb-a4cf-4485-b3bd-972318209141-kube-api-access-v2bnw\") pod \"ovn-northd-0\" (UID: \"7c5a9efb-a4cf-4485-b3bd-972318209141\") " pod="openstack/ovn-northd-0" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.818840 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.818895 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.919517 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Nov 24 01:27:36 crc kubenswrapper[4755]: I1124 01:27:36.967350 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-7qk89"] Nov 24 01:27:36 crc kubenswrapper[4755]: W1124 01:27:36.968976 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7ca9fd9d_ad03_4e2e_b687_7e07be5c5c58.slice/crio-9a9563f758257628363492020910ccc961795c14b4d984ea353dd46adc159041 WatchSource:0}: Error finding container 9a9563f758257628363492020910ccc961795c14b4d984ea353dd46adc159041: Status 404 returned error can't find the container with id 9a9563f758257628363492020910ccc961795c14b4d984ea353dd46adc159041 Nov 24 01:27:37 crc kubenswrapper[4755]: I1124 01:27:37.087388 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-8kczp"] Nov 24 01:27:37 crc kubenswrapper[4755]: I1124 01:27:37.300067 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Nov 24 01:27:37 crc kubenswrapper[4755]: I1124 01:27:37.304973 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/171cc508-1caf-4228-8e5b-6f34017960e2-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-znqch\" (UID: \"171cc508-1caf-4228-8e5b-6f34017960e2\") " pod="openstack/dnsmasq-dns-86db49b7ff-znqch" Nov 24 01:27:37 crc kubenswrapper[4755]: I1124 01:27:37.347116 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Nov 24 01:27:37 crc kubenswrapper[4755]: W1124 01:27:37.354174 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7c5a9efb_a4cf_4485_b3bd_972318209141.slice/crio-9861e6709e0cf6a41af1b51926696693a5e771a09a5f6848dadc61a0f0502a2d WatchSource:0}: Error finding container 9861e6709e0cf6a41af1b51926696693a5e771a09a5f6848dadc61a0f0502a2d: Status 404 returned error can't find the container with id 9861e6709e0cf6a41af1b51926696693a5e771a09a5f6848dadc61a0f0502a2d Nov 24 01:27:37 crc kubenswrapper[4755]: I1124 01:27:37.504944 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-znqch" Nov 24 01:27:37 crc kubenswrapper[4755]: I1124 01:27:37.876255 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"7c5a9efb-a4cf-4485-b3bd-972318209141","Type":"ContainerStarted","Data":"9861e6709e0cf6a41af1b51926696693a5e771a09a5f6848dadc61a0f0502a2d"} Nov 24 01:27:37 crc kubenswrapper[4755]: I1124 01:27:37.877806 4755 generic.go:334] "Generic (PLEG): container finished" podID="7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58" containerID="bb91154909a62b263e692885b30dee539dc5f3d836bb2b07660fd71d36df7329" exitCode=0 Nov 24 01:27:37 crc kubenswrapper[4755]: I1124 01:27:37.877873 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f896c8c65-7qk89" event={"ID":"7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58","Type":"ContainerDied","Data":"bb91154909a62b263e692885b30dee539dc5f3d836bb2b07660fd71d36df7329"} Nov 24 01:27:37 crc kubenswrapper[4755]: I1124 01:27:37.877907 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f896c8c65-7qk89" event={"ID":"7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58","Type":"ContainerStarted","Data":"9a9563f758257628363492020910ccc961795c14b4d984ea353dd46adc159041"} Nov 24 01:27:37 crc kubenswrapper[4755]: I1124 01:27:37.885648 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-8kczp" event={"ID":"509f3153-a59d-4614-a753-8cd8df81734c","Type":"ContainerStarted","Data":"fe89b0dc2a891b38f11d5b394079dd72c72207f01e3fe4dd9d166eaee1d4e3bc"} Nov 24 01:27:37 crc kubenswrapper[4755]: I1124 01:27:37.885698 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-8kczp" event={"ID":"509f3153-a59d-4614-a753-8cd8df81734c","Type":"ContainerStarted","Data":"86993eca796aeae7dc4a0f7636c4d28de1932b97f36180d5c0fbb7a86127964f"} Nov 24 01:27:37 crc kubenswrapper[4755]: I1124 01:27:37.940267 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-znqch"] Nov 24 01:27:37 crc kubenswrapper[4755]: I1124 01:27:37.952140 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-8kczp" podStartSLOduration=1.952117721 podStartE2EDuration="1.952117721s" podCreationTimestamp="2025-11-24 01:27:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:27:37.921630874 +0000 UTC m=+882.607696385" watchObservedRunningTime="2025-11-24 01:27:37.952117721 +0000 UTC m=+882.638183222" Nov 24 01:27:38 crc kubenswrapper[4755]: I1124 01:27:38.166093 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Nov 24 01:27:38 crc kubenswrapper[4755]: I1124 01:27:38.166133 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Nov 24 01:27:38 crc kubenswrapper[4755]: I1124 01:27:38.250740 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7f896c8c65-7qk89" Nov 24 01:27:38 crc kubenswrapper[4755]: I1124 01:27:38.304097 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58-ovsdbserver-sb\") pod \"7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58\" (UID: \"7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58\") " Nov 24 01:27:38 crc kubenswrapper[4755]: I1124 01:27:38.304213 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58-dns-svc\") pod \"7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58\" (UID: \"7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58\") " Nov 24 01:27:38 crc kubenswrapper[4755]: I1124 01:27:38.304284 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58-config\") pod \"7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58\" (UID: \"7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58\") " Nov 24 01:27:38 crc kubenswrapper[4755]: I1124 01:27:38.304342 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rx2j2\" (UniqueName: \"kubernetes.io/projected/7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58-kube-api-access-rx2j2\") pod \"7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58\" (UID: \"7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58\") " Nov 24 01:27:38 crc kubenswrapper[4755]: I1124 01:27:38.308187 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58-kube-api-access-rx2j2" (OuterVolumeSpecName: "kube-api-access-rx2j2") pod "7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58" (UID: "7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58"). InnerVolumeSpecName "kube-api-access-rx2j2". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:27:38 crc kubenswrapper[4755]: I1124 01:27:38.324252 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58" (UID: "7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:27:38 crc kubenswrapper[4755]: I1124 01:27:38.333417 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58" (UID: "7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:27:38 crc kubenswrapper[4755]: I1124 01:27:38.337008 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58-config" (OuterVolumeSpecName: "config") pod "7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58" (UID: "7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:27:38 crc kubenswrapper[4755]: I1124 01:27:38.408406 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 24 01:27:38 crc kubenswrapper[4755]: I1124 01:27:38.408431 4755 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 24 01:27:38 crc kubenswrapper[4755]: I1124 01:27:38.408441 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:27:38 crc kubenswrapper[4755]: I1124 01:27:38.408450 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rx2j2\" (UniqueName: \"kubernetes.io/projected/7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58-kube-api-access-rx2j2\") on node \"crc\" DevicePath \"\"" Nov 24 01:27:38 crc kubenswrapper[4755]: I1124 01:27:38.502718 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Nov 24 01:27:38 crc kubenswrapper[4755]: I1124 01:27:38.893959 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f896c8c65-7qk89" event={"ID":"7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58","Type":"ContainerDied","Data":"9a9563f758257628363492020910ccc961795c14b4d984ea353dd46adc159041"} Nov 24 01:27:38 crc kubenswrapper[4755]: I1124 01:27:38.893986 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f896c8c65-7qk89" Nov 24 01:27:38 crc kubenswrapper[4755]: I1124 01:27:38.894288 4755 scope.go:117] "RemoveContainer" containerID="bb91154909a62b263e692885b30dee539dc5f3d836bb2b07660fd71d36df7329" Nov 24 01:27:38 crc kubenswrapper[4755]: I1124 01:27:38.896878 4755 generic.go:334] "Generic (PLEG): container finished" podID="171cc508-1caf-4228-8e5b-6f34017960e2" containerID="e51d6346586a268f96b696fecc4423b94f14d9ff67a4f3f532e57ec0e528a78c" exitCode=0 Nov 24 01:27:38 crc kubenswrapper[4755]: I1124 01:27:38.896995 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-znqch" event={"ID":"171cc508-1caf-4228-8e5b-6f34017960e2","Type":"ContainerDied","Data":"e51d6346586a268f96b696fecc4423b94f14d9ff67a4f3f532e57ec0e528a78c"} Nov 24 01:27:38 crc kubenswrapper[4755]: I1124 01:27:38.897048 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-znqch" event={"ID":"171cc508-1caf-4228-8e5b-6f34017960e2","Type":"ContainerStarted","Data":"87a5ce330b5bc1ff0ea42504b2e536991b19416afc8418157aa352bd412c1764"} Nov 24 01:27:38 crc kubenswrapper[4755]: I1124 01:27:38.991725 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-7qk89"] Nov 24 01:27:39 crc kubenswrapper[4755]: I1124 01:27:38.996737 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-7qk89"] Nov 24 01:27:39 crc kubenswrapper[4755]: I1124 01:27:39.905353 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-znqch" event={"ID":"171cc508-1caf-4228-8e5b-6f34017960e2","Type":"ContainerStarted","Data":"c9c3e0a100476b4fc08c7bb0858a041a4899f3e49e8c326f5acc05c1f82b539a"} Nov 24 01:27:39 crc kubenswrapper[4755]: I1124 01:27:39.905695 4755 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-86db49b7ff-znqch" Nov 24 01:27:39 crc kubenswrapper[4755]: I1124 01:27:39.908020 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"7c5a9efb-a4cf-4485-b3bd-972318209141","Type":"ContainerStarted","Data":"5ef5d82c2291150b1e04e7e62d8dd2b11b970d868905b34b856308f009abf2e7"} Nov 24 01:27:39 crc kubenswrapper[4755]: I1124 01:27:39.908058 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"7c5a9efb-a4cf-4485-b3bd-972318209141","Type":"ContainerStarted","Data":"ca2643be032624bc69d11014973d5b3a1a33979aa25a7bfdce2f6fa4affb0753"} Nov 24 01:27:39 crc kubenswrapper[4755]: I1124 01:27:39.908176 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Nov 24 01:27:39 crc kubenswrapper[4755]: I1124 01:27:39.928465 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-86db49b7ff-znqch" podStartSLOduration=3.928444423 podStartE2EDuration="3.928444423s" podCreationTimestamp="2025-11-24 01:27:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:27:39.921725554 +0000 UTC m=+884.607791065" watchObservedRunningTime="2025-11-24 01:27:39.928444423 +0000 UTC m=+884.614509934" Nov 24 01:27:39 crc kubenswrapper[4755]: I1124 01:27:39.947346 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.682294638 podStartE2EDuration="3.947328334s" podCreationTimestamp="2025-11-24 01:27:36 +0000 UTC" firstStartedPulling="2025-11-24 01:27:37.356138542 +0000 UTC m=+882.042204043" lastFinishedPulling="2025-11-24 01:27:38.621172238 +0000 UTC m=+883.307237739" observedRunningTime="2025-11-24 01:27:39.941189901 +0000 UTC m=+884.627255412" watchObservedRunningTime="2025-11-24 01:27:39.947328334 +0000 UTC m=+884.633393835" Nov 24 01:27:40 crc kubenswrapper[4755]: I1124 01:27:40.004828 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58" path="/var/lib/kubelet/pods/7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58/volumes" Nov 24 01:27:40 crc kubenswrapper[4755]: I1124 01:27:40.225181 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-znqch"] Nov 24 01:27:40 crc kubenswrapper[4755]: I1124 01:27:40.260127 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-698758b865-7gzpp"] Nov 24 01:27:40 crc kubenswrapper[4755]: E1124 01:27:40.260430 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58" containerName="init" Nov 24 01:27:40 crc kubenswrapper[4755]: I1124 01:27:40.260447 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58" containerName="init" Nov 24 01:27:40 crc kubenswrapper[4755]: I1124 01:27:40.260591 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ca9fd9d-ad03-4e2e-b687-7e07be5c5c58" containerName="init" Nov 24 01:27:40 crc kubenswrapper[4755]: I1124 01:27:40.261384 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-7gzpp" Nov 24 01:27:40 crc kubenswrapper[4755]: I1124 01:27:40.284678 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Nov 24 01:27:40 crc kubenswrapper[4755]: I1124 01:27:40.287469 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-7gzpp"] Nov 24 01:27:40 crc kubenswrapper[4755]: I1124 01:27:40.351398 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c-dns-svc\") pod \"dnsmasq-dns-698758b865-7gzpp\" (UID: \"a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c\") " pod="openstack/dnsmasq-dns-698758b865-7gzpp" Nov 24 01:27:40 crc kubenswrapper[4755]: I1124 01:27:40.351662 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c-config\") pod \"dnsmasq-dns-698758b865-7gzpp\" (UID: \"a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c\") " pod="openstack/dnsmasq-dns-698758b865-7gzpp" Nov 24 01:27:40 crc kubenswrapper[4755]: I1124 01:27:40.351699 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-7gzpp\" (UID: \"a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c\") " pod="openstack/dnsmasq-dns-698758b865-7gzpp" Nov 24 01:27:40 crc kubenswrapper[4755]: I1124 01:27:40.351746 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-7gzpp\" (UID: \"a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c\") " pod="openstack/dnsmasq-dns-698758b865-7gzpp" Nov 24 01:27:40 crc kubenswrapper[4755]: I1124 01:27:40.351842 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k678p\" (UniqueName: \"kubernetes.io/projected/a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c-kube-api-access-k678p\") pod \"dnsmasq-dns-698758b865-7gzpp\" (UID: \"a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c\") " pod="openstack/dnsmasq-dns-698758b865-7gzpp" Nov 24 01:27:40 crc kubenswrapper[4755]: I1124 01:27:40.453717 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-7gzpp\" (UID: \"a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c\") " pod="openstack/dnsmasq-dns-698758b865-7gzpp" Nov 24 01:27:40 crc kubenswrapper[4755]: I1124 01:27:40.453767 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-7gzpp\" (UID: \"a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c\") " pod="openstack/dnsmasq-dns-698758b865-7gzpp" Nov 24 01:27:40 crc kubenswrapper[4755]: I1124 01:27:40.453791 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k678p\" (UniqueName: \"kubernetes.io/projected/a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c-kube-api-access-k678p\") pod \"dnsmasq-dns-698758b865-7gzpp\" (UID: 
\"a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c\") " pod="openstack/dnsmasq-dns-698758b865-7gzpp" Nov 24 01:27:40 crc kubenswrapper[4755]: I1124 01:27:40.453871 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c-dns-svc\") pod \"dnsmasq-dns-698758b865-7gzpp\" (UID: \"a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c\") " pod="openstack/dnsmasq-dns-698758b865-7gzpp" Nov 24 01:27:40 crc kubenswrapper[4755]: I1124 01:27:40.453966 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c-config\") pod \"dnsmasq-dns-698758b865-7gzpp\" (UID: \"a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c\") " pod="openstack/dnsmasq-dns-698758b865-7gzpp" Nov 24 01:27:40 crc kubenswrapper[4755]: I1124 01:27:40.454817 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-7gzpp\" (UID: \"a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c\") " pod="openstack/dnsmasq-dns-698758b865-7gzpp" Nov 24 01:27:40 crc kubenswrapper[4755]: I1124 01:27:40.454853 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c-config\") pod \"dnsmasq-dns-698758b865-7gzpp\" (UID: \"a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c\") " pod="openstack/dnsmasq-dns-698758b865-7gzpp" Nov 24 01:27:40 crc kubenswrapper[4755]: I1124 01:27:40.455159 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-7gzpp\" (UID: \"a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c\") " pod="openstack/dnsmasq-dns-698758b865-7gzpp" Nov 24 01:27:40 crc kubenswrapper[4755]: I1124 01:27:40.455315 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c-dns-svc\") pod \"dnsmasq-dns-698758b865-7gzpp\" (UID: \"a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c\") " pod="openstack/dnsmasq-dns-698758b865-7gzpp" Nov 24 01:27:40 crc kubenswrapper[4755]: I1124 01:27:40.477354 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k678p\" (UniqueName: \"kubernetes.io/projected/a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c-kube-api-access-k678p\") pod \"dnsmasq-dns-698758b865-7gzpp\" (UID: \"a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c\") " pod="openstack/dnsmasq-dns-698758b865-7gzpp" Nov 24 01:27:40 crc kubenswrapper[4755]: I1124 01:27:40.543077 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Nov 24 01:27:40 crc kubenswrapper[4755]: I1124 01:27:40.579182 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-7gzpp" Nov 24 01:27:40 crc kubenswrapper[4755]: I1124 01:27:40.656127 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Nov 24 01:27:41 crc kubenswrapper[4755]: I1124 01:27:41.013545 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-7gzpp"] Nov 24 01:27:41 crc kubenswrapper[4755]: W1124 01:27:41.018914 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda4c9f27e_2ddf_443d_aa39_cbd02f43fc5c.slice/crio-0883fc84ad362db9d3f46534edd882003896d9e10aa8237bd7c9605438da3f33 WatchSource:0}: Error finding container 0883fc84ad362db9d3f46534edd882003896d9e10aa8237bd7c9605438da3f33: Status 404 returned error can't find the container with id 0883fc84ad362db9d3f46534edd882003896d9e10aa8237bd7c9605438da3f33 Nov 24 01:27:41 crc kubenswrapper[4755]: I1124 01:27:41.351944 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Nov 24 01:27:41 crc kubenswrapper[4755]: I1124 01:27:41.363679 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Nov 24 01:27:41 crc kubenswrapper[4755]: I1124 01:27:41.365480 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-htwbd" Nov 24 01:27:41 crc kubenswrapper[4755]: I1124 01:27:41.365569 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Nov 24 01:27:41 crc kubenswrapper[4755]: I1124 01:27:41.365974 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Nov 24 01:27:41 crc kubenswrapper[4755]: I1124 01:27:41.369314 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Nov 24 01:27:41 crc kubenswrapper[4755]: I1124 01:27:41.393716 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Nov 24 01:27:41 crc kubenswrapper[4755]: I1124 01:27:41.471909 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/c3d5f6f4-a502-4cbf-95c6-e85416bcd559-cache\") pod \"swift-storage-0\" (UID: \"c3d5f6f4-a502-4cbf-95c6-e85416bcd559\") " pod="openstack/swift-storage-0" Nov 24 01:27:41 crc kubenswrapper[4755]: I1124 01:27:41.472027 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"swift-storage-0\" (UID: \"c3d5f6f4-a502-4cbf-95c6-e85416bcd559\") " pod="openstack/swift-storage-0" Nov 24 01:27:41 crc kubenswrapper[4755]: I1124 01:27:41.472056 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/c3d5f6f4-a502-4cbf-95c6-e85416bcd559-lock\") pod \"swift-storage-0\" (UID: \"c3d5f6f4-a502-4cbf-95c6-e85416bcd559\") " pod="openstack/swift-storage-0" Nov 24 01:27:41 crc kubenswrapper[4755]: I1124 01:27:41.472091 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c3d5f6f4-a502-4cbf-95c6-e85416bcd559-etc-swift\") pod \"swift-storage-0\" (UID: \"c3d5f6f4-a502-4cbf-95c6-e85416bcd559\") " pod="openstack/swift-storage-0" Nov 24 
01:27:41 crc kubenswrapper[4755]: I1124 01:27:41.472160 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kfclv\" (UniqueName: \"kubernetes.io/projected/c3d5f6f4-a502-4cbf-95c6-e85416bcd559-kube-api-access-kfclv\") pod \"swift-storage-0\" (UID: \"c3d5f6f4-a502-4cbf-95c6-e85416bcd559\") " pod="openstack/swift-storage-0" Nov 24 01:27:41 crc kubenswrapper[4755]: I1124 01:27:41.573187 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/c3d5f6f4-a502-4cbf-95c6-e85416bcd559-cache\") pod \"swift-storage-0\" (UID: \"c3d5f6f4-a502-4cbf-95c6-e85416bcd559\") " pod="openstack/swift-storage-0" Nov 24 01:27:41 crc kubenswrapper[4755]: I1124 01:27:41.573251 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"swift-storage-0\" (UID: \"c3d5f6f4-a502-4cbf-95c6-e85416bcd559\") " pod="openstack/swift-storage-0" Nov 24 01:27:41 crc kubenswrapper[4755]: I1124 01:27:41.573268 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/c3d5f6f4-a502-4cbf-95c6-e85416bcd559-lock\") pod \"swift-storage-0\" (UID: \"c3d5f6f4-a502-4cbf-95c6-e85416bcd559\") " pod="openstack/swift-storage-0" Nov 24 01:27:41 crc kubenswrapper[4755]: I1124 01:27:41.573292 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c3d5f6f4-a502-4cbf-95c6-e85416bcd559-etc-swift\") pod \"swift-storage-0\" (UID: \"c3d5f6f4-a502-4cbf-95c6-e85416bcd559\") " pod="openstack/swift-storage-0" Nov 24 01:27:41 crc kubenswrapper[4755]: I1124 01:27:41.573309 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kfclv\" (UniqueName: \"kubernetes.io/projected/c3d5f6f4-a502-4cbf-95c6-e85416bcd559-kube-api-access-kfclv\") pod \"swift-storage-0\" (UID: \"c3d5f6f4-a502-4cbf-95c6-e85416bcd559\") " pod="openstack/swift-storage-0" Nov 24 01:27:41 crc kubenswrapper[4755]: I1124 01:27:41.574104 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/c3d5f6f4-a502-4cbf-95c6-e85416bcd559-cache\") pod \"swift-storage-0\" (UID: \"c3d5f6f4-a502-4cbf-95c6-e85416bcd559\") " pod="openstack/swift-storage-0" Nov 24 01:27:41 crc kubenswrapper[4755]: I1124 01:27:41.574320 4755 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"swift-storage-0\" (UID: \"c3d5f6f4-a502-4cbf-95c6-e85416bcd559\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/swift-storage-0" Nov 24 01:27:41 crc kubenswrapper[4755]: E1124 01:27:41.575829 4755 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Nov 24 01:27:41 crc kubenswrapper[4755]: E1124 01:27:41.575860 4755 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Nov 24 01:27:41 crc kubenswrapper[4755]: I1124 01:27:41.575882 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/c3d5f6f4-a502-4cbf-95c6-e85416bcd559-lock\") pod \"swift-storage-0\" (UID: \"c3d5f6f4-a502-4cbf-95c6-e85416bcd559\") " 
pod="openstack/swift-storage-0" Nov 24 01:27:41 crc kubenswrapper[4755]: E1124 01:27:41.575922 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/c3d5f6f4-a502-4cbf-95c6-e85416bcd559-etc-swift podName:c3d5f6f4-a502-4cbf-95c6-e85416bcd559 nodeName:}" failed. No retries permitted until 2025-11-24 01:27:42.075901259 +0000 UTC m=+886.761966760 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/c3d5f6f4-a502-4cbf-95c6-e85416bcd559-etc-swift") pod "swift-storage-0" (UID: "c3d5f6f4-a502-4cbf-95c6-e85416bcd559") : configmap "swift-ring-files" not found Nov 24 01:27:41 crc kubenswrapper[4755]: I1124 01:27:41.591068 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kfclv\" (UniqueName: \"kubernetes.io/projected/c3d5f6f4-a502-4cbf-95c6-e85416bcd559-kube-api-access-kfclv\") pod \"swift-storage-0\" (UID: \"c3d5f6f4-a502-4cbf-95c6-e85416bcd559\") " pod="openstack/swift-storage-0" Nov 24 01:27:41 crc kubenswrapper[4755]: I1124 01:27:41.602106 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"swift-storage-0\" (UID: \"c3d5f6f4-a502-4cbf-95c6-e85416bcd559\") " pod="openstack/swift-storage-0" Nov 24 01:27:41 crc kubenswrapper[4755]: I1124 01:27:41.925710 4755 generic.go:334] "Generic (PLEG): container finished" podID="a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c" containerID="dd9f1e7ccce2b941c3befc1271f0411bcec3895fbac71fd0d49979c25d0a90fb" exitCode=0 Nov 24 01:27:41 crc kubenswrapper[4755]: I1124 01:27:41.926226 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-86db49b7ff-znqch" podUID="171cc508-1caf-4228-8e5b-6f34017960e2" containerName="dnsmasq-dns" containerID="cri-o://c9c3e0a100476b4fc08c7bb0858a041a4899f3e49e8c326f5acc05c1f82b539a" gracePeriod=10 Nov 24 01:27:41 crc kubenswrapper[4755]: I1124 01:27:41.927469 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-7gzpp" event={"ID":"a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c","Type":"ContainerDied","Data":"dd9f1e7ccce2b941c3befc1271f0411bcec3895fbac71fd0d49979c25d0a90fb"} Nov 24 01:27:41 crc kubenswrapper[4755]: I1124 01:27:41.927499 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-7gzpp" event={"ID":"a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c","Type":"ContainerStarted","Data":"0883fc84ad362db9d3f46534edd882003896d9e10aa8237bd7c9605438da3f33"} Nov 24 01:27:42 crc kubenswrapper[4755]: I1124 01:27:42.092217 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c3d5f6f4-a502-4cbf-95c6-e85416bcd559-etc-swift\") pod \"swift-storage-0\" (UID: \"c3d5f6f4-a502-4cbf-95c6-e85416bcd559\") " pod="openstack/swift-storage-0" Nov 24 01:27:42 crc kubenswrapper[4755]: E1124 01:27:42.093219 4755 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Nov 24 01:27:42 crc kubenswrapper[4755]: E1124 01:27:42.093248 4755 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Nov 24 01:27:42 crc kubenswrapper[4755]: E1124 01:27:42.093300 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/c3d5f6f4-a502-4cbf-95c6-e85416bcd559-etc-swift 
podName:c3d5f6f4-a502-4cbf-95c6-e85416bcd559 nodeName:}" failed. No retries permitted until 2025-11-24 01:27:43.093284278 +0000 UTC m=+887.779349779 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/c3d5f6f4-a502-4cbf-95c6-e85416bcd559-etc-swift") pod "swift-storage-0" (UID: "c3d5f6f4-a502-4cbf-95c6-e85416bcd559") : configmap "swift-ring-files" not found Nov 24 01:27:42 crc kubenswrapper[4755]: I1124 01:27:42.308253 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-znqch" Nov 24 01:27:42 crc kubenswrapper[4755]: I1124 01:27:42.399112 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/171cc508-1caf-4228-8e5b-6f34017960e2-dns-svc\") pod \"171cc508-1caf-4228-8e5b-6f34017960e2\" (UID: \"171cc508-1caf-4228-8e5b-6f34017960e2\") " Nov 24 01:27:42 crc kubenswrapper[4755]: I1124 01:27:42.399217 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/171cc508-1caf-4228-8e5b-6f34017960e2-ovsdbserver-nb\") pod \"171cc508-1caf-4228-8e5b-6f34017960e2\" (UID: \"171cc508-1caf-4228-8e5b-6f34017960e2\") " Nov 24 01:27:42 crc kubenswrapper[4755]: I1124 01:27:42.399293 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/171cc508-1caf-4228-8e5b-6f34017960e2-config\") pod \"171cc508-1caf-4228-8e5b-6f34017960e2\" (UID: \"171cc508-1caf-4228-8e5b-6f34017960e2\") " Nov 24 01:27:42 crc kubenswrapper[4755]: I1124 01:27:42.399405 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wcdsl\" (UniqueName: \"kubernetes.io/projected/171cc508-1caf-4228-8e5b-6f34017960e2-kube-api-access-wcdsl\") pod \"171cc508-1caf-4228-8e5b-6f34017960e2\" (UID: \"171cc508-1caf-4228-8e5b-6f34017960e2\") " Nov 24 01:27:42 crc kubenswrapper[4755]: I1124 01:27:42.399447 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/171cc508-1caf-4228-8e5b-6f34017960e2-ovsdbserver-sb\") pod \"171cc508-1caf-4228-8e5b-6f34017960e2\" (UID: \"171cc508-1caf-4228-8e5b-6f34017960e2\") " Nov 24 01:27:42 crc kubenswrapper[4755]: I1124 01:27:42.404870 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/171cc508-1caf-4228-8e5b-6f34017960e2-kube-api-access-wcdsl" (OuterVolumeSpecName: "kube-api-access-wcdsl") pod "171cc508-1caf-4228-8e5b-6f34017960e2" (UID: "171cc508-1caf-4228-8e5b-6f34017960e2"). InnerVolumeSpecName "kube-api-access-wcdsl". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:27:42 crc kubenswrapper[4755]: I1124 01:27:42.441786 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/171cc508-1caf-4228-8e5b-6f34017960e2-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "171cc508-1caf-4228-8e5b-6f34017960e2" (UID: "171cc508-1caf-4228-8e5b-6f34017960e2"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:27:42 crc kubenswrapper[4755]: I1124 01:27:42.442641 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/171cc508-1caf-4228-8e5b-6f34017960e2-config" (OuterVolumeSpecName: "config") pod "171cc508-1caf-4228-8e5b-6f34017960e2" (UID: "171cc508-1caf-4228-8e5b-6f34017960e2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:27:42 crc kubenswrapper[4755]: I1124 01:27:42.442789 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/171cc508-1caf-4228-8e5b-6f34017960e2-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "171cc508-1caf-4228-8e5b-6f34017960e2" (UID: "171cc508-1caf-4228-8e5b-6f34017960e2"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:27:42 crc kubenswrapper[4755]: I1124 01:27:42.445330 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/171cc508-1caf-4228-8e5b-6f34017960e2-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "171cc508-1caf-4228-8e5b-6f34017960e2" (UID: "171cc508-1caf-4228-8e5b-6f34017960e2"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:27:42 crc kubenswrapper[4755]: I1124 01:27:42.501561 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/171cc508-1caf-4228-8e5b-6f34017960e2-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:27:42 crc kubenswrapper[4755]: I1124 01:27:42.501593 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wcdsl\" (UniqueName: \"kubernetes.io/projected/171cc508-1caf-4228-8e5b-6f34017960e2-kube-api-access-wcdsl\") on node \"crc\" DevicePath \"\"" Nov 24 01:27:42 crc kubenswrapper[4755]: I1124 01:27:42.501617 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/171cc508-1caf-4228-8e5b-6f34017960e2-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 24 01:27:42 crc kubenswrapper[4755]: I1124 01:27:42.501625 4755 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/171cc508-1caf-4228-8e5b-6f34017960e2-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 24 01:27:42 crc kubenswrapper[4755]: I1124 01:27:42.501633 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/171cc508-1caf-4228-8e5b-6f34017960e2-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 24 01:27:42 crc kubenswrapper[4755]: I1124 01:27:42.914906 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Nov 24 01:27:42 crc kubenswrapper[4755]: I1124 01:27:42.952079 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-7gzpp" event={"ID":"a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c","Type":"ContainerStarted","Data":"23452913ad37758af62107f2d6c27c168db80a2d9d06a76d76fc10c30136ab8b"} Nov 24 01:27:42 crc kubenswrapper[4755]: I1124 01:27:42.952150 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-698758b865-7gzpp" Nov 24 01:27:42 crc kubenswrapper[4755]: I1124 01:27:42.958074 4755 generic.go:334] "Generic (PLEG): container finished" podID="171cc508-1caf-4228-8e5b-6f34017960e2" 
containerID="c9c3e0a100476b4fc08c7bb0858a041a4899f3e49e8c326f5acc05c1f82b539a" exitCode=0 Nov 24 01:27:42 crc kubenswrapper[4755]: I1124 01:27:42.958156 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-znqch" event={"ID":"171cc508-1caf-4228-8e5b-6f34017960e2","Type":"ContainerDied","Data":"c9c3e0a100476b4fc08c7bb0858a041a4899f3e49e8c326f5acc05c1f82b539a"} Nov 24 01:27:42 crc kubenswrapper[4755]: I1124 01:27:42.958193 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-znqch" event={"ID":"171cc508-1caf-4228-8e5b-6f34017960e2","Type":"ContainerDied","Data":"87a5ce330b5bc1ff0ea42504b2e536991b19416afc8418157aa352bd412c1764"} Nov 24 01:27:42 crc kubenswrapper[4755]: I1124 01:27:42.958159 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-znqch" Nov 24 01:27:42 crc kubenswrapper[4755]: I1124 01:27:42.958243 4755 scope.go:117] "RemoveContainer" containerID="c9c3e0a100476b4fc08c7bb0858a041a4899f3e49e8c326f5acc05c1f82b539a" Nov 24 01:27:42 crc kubenswrapper[4755]: I1124 01:27:42.977329 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-698758b865-7gzpp" podStartSLOduration=2.977309763 podStartE2EDuration="2.977309763s" podCreationTimestamp="2025-11-24 01:27:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:27:42.975459961 +0000 UTC m=+887.661525472" watchObservedRunningTime="2025-11-24 01:27:42.977309763 +0000 UTC m=+887.663375264" Nov 24 01:27:42 crc kubenswrapper[4755]: I1124 01:27:42.980534 4755 scope.go:117] "RemoveContainer" containerID="e51d6346586a268f96b696fecc4423b94f14d9ff67a4f3f532e57ec0e528a78c" Nov 24 01:27:43 crc kubenswrapper[4755]: I1124 01:27:43.014808 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-znqch"] Nov 24 01:27:43 crc kubenswrapper[4755]: I1124 01:27:43.014981 4755 scope.go:117] "RemoveContainer" containerID="c9c3e0a100476b4fc08c7bb0858a041a4899f3e49e8c326f5acc05c1f82b539a" Nov 24 01:27:43 crc kubenswrapper[4755]: I1124 01:27:43.015138 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Nov 24 01:27:43 crc kubenswrapper[4755]: E1124 01:27:43.022972 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c9c3e0a100476b4fc08c7bb0858a041a4899f3e49e8c326f5acc05c1f82b539a\": container with ID starting with c9c3e0a100476b4fc08c7bb0858a041a4899f3e49e8c326f5acc05c1f82b539a not found: ID does not exist" containerID="c9c3e0a100476b4fc08c7bb0858a041a4899f3e49e8c326f5acc05c1f82b539a" Nov 24 01:27:43 crc kubenswrapper[4755]: I1124 01:27:43.023018 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9c3e0a100476b4fc08c7bb0858a041a4899f3e49e8c326f5acc05c1f82b539a"} err="failed to get container status \"c9c3e0a100476b4fc08c7bb0858a041a4899f3e49e8c326f5acc05c1f82b539a\": rpc error: code = NotFound desc = could not find container \"c9c3e0a100476b4fc08c7bb0858a041a4899f3e49e8c326f5acc05c1f82b539a\": container with ID starting with c9c3e0a100476b4fc08c7bb0858a041a4899f3e49e8c326f5acc05c1f82b539a not found: ID does not exist" Nov 24 01:27:43 crc kubenswrapper[4755]: I1124 01:27:43.023061 4755 scope.go:117] "RemoveContainer" 
containerID="e51d6346586a268f96b696fecc4423b94f14d9ff67a4f3f532e57ec0e528a78c" Nov 24 01:27:43 crc kubenswrapper[4755]: E1124 01:27:43.023665 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e51d6346586a268f96b696fecc4423b94f14d9ff67a4f3f532e57ec0e528a78c\": container with ID starting with e51d6346586a268f96b696fecc4423b94f14d9ff67a4f3f532e57ec0e528a78c not found: ID does not exist" containerID="e51d6346586a268f96b696fecc4423b94f14d9ff67a4f3f532e57ec0e528a78c" Nov 24 01:27:43 crc kubenswrapper[4755]: I1124 01:27:43.023690 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e51d6346586a268f96b696fecc4423b94f14d9ff67a4f3f532e57ec0e528a78c"} err="failed to get container status \"e51d6346586a268f96b696fecc4423b94f14d9ff67a4f3f532e57ec0e528a78c\": rpc error: code = NotFound desc = could not find container \"e51d6346586a268f96b696fecc4423b94f14d9ff67a4f3f532e57ec0e528a78c\": container with ID starting with e51d6346586a268f96b696fecc4423b94f14d9ff67a4f3f532e57ec0e528a78c not found: ID does not exist" Nov 24 01:27:43 crc kubenswrapper[4755]: I1124 01:27:43.029969 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-znqch"] Nov 24 01:27:43 crc kubenswrapper[4755]: I1124 01:27:43.118709 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c3d5f6f4-a502-4cbf-95c6-e85416bcd559-etc-swift\") pod \"swift-storage-0\" (UID: \"c3d5f6f4-a502-4cbf-95c6-e85416bcd559\") " pod="openstack/swift-storage-0" Nov 24 01:27:43 crc kubenswrapper[4755]: E1124 01:27:43.119220 4755 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Nov 24 01:27:43 crc kubenswrapper[4755]: E1124 01:27:43.119260 4755 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Nov 24 01:27:43 crc kubenswrapper[4755]: E1124 01:27:43.119358 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/c3d5f6f4-a502-4cbf-95c6-e85416bcd559-etc-swift podName:c3d5f6f4-a502-4cbf-95c6-e85416bcd559 nodeName:}" failed. No retries permitted until 2025-11-24 01:27:45.119305619 +0000 UTC m=+889.805371120 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/c3d5f6f4-a502-4cbf-95c6-e85416bcd559-etc-swift") pod "swift-storage-0" (UID: "c3d5f6f4-a502-4cbf-95c6-e85416bcd559") : configmap "swift-ring-files" not found Nov 24 01:27:44 crc kubenswrapper[4755]: I1124 01:27:44.005014 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="171cc508-1caf-4228-8e5b-6f34017960e2" path="/var/lib/kubelet/pods/171cc508-1caf-4228-8e5b-6f34017960e2/volumes" Nov 24 01:27:45 crc kubenswrapper[4755]: I1124 01:27:45.151010 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c3d5f6f4-a502-4cbf-95c6-e85416bcd559-etc-swift\") pod \"swift-storage-0\" (UID: \"c3d5f6f4-a502-4cbf-95c6-e85416bcd559\") " pod="openstack/swift-storage-0" Nov 24 01:27:45 crc kubenswrapper[4755]: E1124 01:27:45.151216 4755 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Nov 24 01:27:45 crc kubenswrapper[4755]: E1124 01:27:45.151236 4755 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Nov 24 01:27:45 crc kubenswrapper[4755]: E1124 01:27:45.151276 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/c3d5f6f4-a502-4cbf-95c6-e85416bcd559-etc-swift podName:c3d5f6f4-a502-4cbf-95c6-e85416bcd559 nodeName:}" failed. No retries permitted until 2025-11-24 01:27:49.151263134 +0000 UTC m=+893.837328635 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/c3d5f6f4-a502-4cbf-95c6-e85416bcd559-etc-swift") pod "swift-storage-0" (UID: "c3d5f6f4-a502-4cbf-95c6-e85416bcd559") : configmap "swift-ring-files" not found Nov 24 01:27:45 crc kubenswrapper[4755]: I1124 01:27:45.342404 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-lpg42"] Nov 24 01:27:45 crc kubenswrapper[4755]: E1124 01:27:45.342758 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="171cc508-1caf-4228-8e5b-6f34017960e2" containerName="dnsmasq-dns" Nov 24 01:27:45 crc kubenswrapper[4755]: I1124 01:27:45.342775 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="171cc508-1caf-4228-8e5b-6f34017960e2" containerName="dnsmasq-dns" Nov 24 01:27:45 crc kubenswrapper[4755]: E1124 01:27:45.342790 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="171cc508-1caf-4228-8e5b-6f34017960e2" containerName="init" Nov 24 01:27:45 crc kubenswrapper[4755]: I1124 01:27:45.342799 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="171cc508-1caf-4228-8e5b-6f34017960e2" containerName="init" Nov 24 01:27:45 crc kubenswrapper[4755]: I1124 01:27:45.343024 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="171cc508-1caf-4228-8e5b-6f34017960e2" containerName="dnsmasq-dns" Nov 24 01:27:45 crc kubenswrapper[4755]: I1124 01:27:45.343707 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-lpg42" Nov 24 01:27:45 crc kubenswrapper[4755]: I1124 01:27:45.345395 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Nov 24 01:27:45 crc kubenswrapper[4755]: I1124 01:27:45.346028 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Nov 24 01:27:45 crc kubenswrapper[4755]: I1124 01:27:45.349726 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Nov 24 01:27:45 crc kubenswrapper[4755]: I1124 01:27:45.356052 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-lpg42"] Nov 24 01:27:45 crc kubenswrapper[4755]: I1124 01:27:45.454705 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a67b2b8c-3846-4f9d-a5d0-9279028f63e5-combined-ca-bundle\") pod \"swift-ring-rebalance-lpg42\" (UID: \"a67b2b8c-3846-4f9d-a5d0-9279028f63e5\") " pod="openstack/swift-ring-rebalance-lpg42" Nov 24 01:27:45 crc kubenswrapper[4755]: I1124 01:27:45.454761 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a67b2b8c-3846-4f9d-a5d0-9279028f63e5-swiftconf\") pod \"swift-ring-rebalance-lpg42\" (UID: \"a67b2b8c-3846-4f9d-a5d0-9279028f63e5\") " pod="openstack/swift-ring-rebalance-lpg42" Nov 24 01:27:45 crc kubenswrapper[4755]: I1124 01:27:45.454855 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a67b2b8c-3846-4f9d-a5d0-9279028f63e5-dispersionconf\") pod \"swift-ring-rebalance-lpg42\" (UID: \"a67b2b8c-3846-4f9d-a5d0-9279028f63e5\") " pod="openstack/swift-ring-rebalance-lpg42" Nov 24 01:27:45 crc kubenswrapper[4755]: I1124 01:27:45.454965 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a67b2b8c-3846-4f9d-a5d0-9279028f63e5-scripts\") pod \"swift-ring-rebalance-lpg42\" (UID: \"a67b2b8c-3846-4f9d-a5d0-9279028f63e5\") " pod="openstack/swift-ring-rebalance-lpg42" Nov 24 01:27:45 crc kubenswrapper[4755]: I1124 01:27:45.455045 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/a67b2b8c-3846-4f9d-a5d0-9279028f63e5-etc-swift\") pod \"swift-ring-rebalance-lpg42\" (UID: \"a67b2b8c-3846-4f9d-a5d0-9279028f63e5\") " pod="openstack/swift-ring-rebalance-lpg42" Nov 24 01:27:45 crc kubenswrapper[4755]: I1124 01:27:45.455132 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a67b2b8c-3846-4f9d-a5d0-9279028f63e5-ring-data-devices\") pod \"swift-ring-rebalance-lpg42\" (UID: \"a67b2b8c-3846-4f9d-a5d0-9279028f63e5\") " pod="openstack/swift-ring-rebalance-lpg42" Nov 24 01:27:45 crc kubenswrapper[4755]: I1124 01:27:45.455214 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4d56\" (UniqueName: \"kubernetes.io/projected/a67b2b8c-3846-4f9d-a5d0-9279028f63e5-kube-api-access-c4d56\") pod \"swift-ring-rebalance-lpg42\" (UID: \"a67b2b8c-3846-4f9d-a5d0-9279028f63e5\") " pod="openstack/swift-ring-rebalance-lpg42" Nov 24 
01:27:45 crc kubenswrapper[4755]: I1124 01:27:45.557061 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a67b2b8c-3846-4f9d-a5d0-9279028f63e5-combined-ca-bundle\") pod \"swift-ring-rebalance-lpg42\" (UID: \"a67b2b8c-3846-4f9d-a5d0-9279028f63e5\") " pod="openstack/swift-ring-rebalance-lpg42" Nov 24 01:27:45 crc kubenswrapper[4755]: I1124 01:27:45.557125 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a67b2b8c-3846-4f9d-a5d0-9279028f63e5-swiftconf\") pod \"swift-ring-rebalance-lpg42\" (UID: \"a67b2b8c-3846-4f9d-a5d0-9279028f63e5\") " pod="openstack/swift-ring-rebalance-lpg42" Nov 24 01:27:45 crc kubenswrapper[4755]: I1124 01:27:45.557190 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a67b2b8c-3846-4f9d-a5d0-9279028f63e5-dispersionconf\") pod \"swift-ring-rebalance-lpg42\" (UID: \"a67b2b8c-3846-4f9d-a5d0-9279028f63e5\") " pod="openstack/swift-ring-rebalance-lpg42" Nov 24 01:27:45 crc kubenswrapper[4755]: I1124 01:27:45.557206 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a67b2b8c-3846-4f9d-a5d0-9279028f63e5-scripts\") pod \"swift-ring-rebalance-lpg42\" (UID: \"a67b2b8c-3846-4f9d-a5d0-9279028f63e5\") " pod="openstack/swift-ring-rebalance-lpg42" Nov 24 01:27:45 crc kubenswrapper[4755]: I1124 01:27:45.557276 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/a67b2b8c-3846-4f9d-a5d0-9279028f63e5-etc-swift\") pod \"swift-ring-rebalance-lpg42\" (UID: \"a67b2b8c-3846-4f9d-a5d0-9279028f63e5\") " pod="openstack/swift-ring-rebalance-lpg42" Nov 24 01:27:45 crc kubenswrapper[4755]: I1124 01:27:45.557306 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a67b2b8c-3846-4f9d-a5d0-9279028f63e5-ring-data-devices\") pod \"swift-ring-rebalance-lpg42\" (UID: \"a67b2b8c-3846-4f9d-a5d0-9279028f63e5\") " pod="openstack/swift-ring-rebalance-lpg42" Nov 24 01:27:45 crc kubenswrapper[4755]: I1124 01:27:45.557348 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c4d56\" (UniqueName: \"kubernetes.io/projected/a67b2b8c-3846-4f9d-a5d0-9279028f63e5-kube-api-access-c4d56\") pod \"swift-ring-rebalance-lpg42\" (UID: \"a67b2b8c-3846-4f9d-a5d0-9279028f63e5\") " pod="openstack/swift-ring-rebalance-lpg42" Nov 24 01:27:45 crc kubenswrapper[4755]: I1124 01:27:45.557807 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/a67b2b8c-3846-4f9d-a5d0-9279028f63e5-etc-swift\") pod \"swift-ring-rebalance-lpg42\" (UID: \"a67b2b8c-3846-4f9d-a5d0-9279028f63e5\") " pod="openstack/swift-ring-rebalance-lpg42" Nov 24 01:27:45 crc kubenswrapper[4755]: I1124 01:27:45.558059 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a67b2b8c-3846-4f9d-a5d0-9279028f63e5-ring-data-devices\") pod \"swift-ring-rebalance-lpg42\" (UID: \"a67b2b8c-3846-4f9d-a5d0-9279028f63e5\") " pod="openstack/swift-ring-rebalance-lpg42" Nov 24 01:27:45 crc kubenswrapper[4755]: I1124 01:27:45.558522 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a67b2b8c-3846-4f9d-a5d0-9279028f63e5-scripts\") pod \"swift-ring-rebalance-lpg42\" (UID: \"a67b2b8c-3846-4f9d-a5d0-9279028f63e5\") " pod="openstack/swift-ring-rebalance-lpg42" Nov 24 01:27:45 crc kubenswrapper[4755]: I1124 01:27:45.562493 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a67b2b8c-3846-4f9d-a5d0-9279028f63e5-swiftconf\") pod \"swift-ring-rebalance-lpg42\" (UID: \"a67b2b8c-3846-4f9d-a5d0-9279028f63e5\") " pod="openstack/swift-ring-rebalance-lpg42" Nov 24 01:27:45 crc kubenswrapper[4755]: I1124 01:27:45.563055 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a67b2b8c-3846-4f9d-a5d0-9279028f63e5-combined-ca-bundle\") pod \"swift-ring-rebalance-lpg42\" (UID: \"a67b2b8c-3846-4f9d-a5d0-9279028f63e5\") " pod="openstack/swift-ring-rebalance-lpg42" Nov 24 01:27:45 crc kubenswrapper[4755]: I1124 01:27:45.574844 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4d56\" (UniqueName: \"kubernetes.io/projected/a67b2b8c-3846-4f9d-a5d0-9279028f63e5-kube-api-access-c4d56\") pod \"swift-ring-rebalance-lpg42\" (UID: \"a67b2b8c-3846-4f9d-a5d0-9279028f63e5\") " pod="openstack/swift-ring-rebalance-lpg42" Nov 24 01:27:45 crc kubenswrapper[4755]: I1124 01:27:45.575249 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a67b2b8c-3846-4f9d-a5d0-9279028f63e5-dispersionconf\") pod \"swift-ring-rebalance-lpg42\" (UID: \"a67b2b8c-3846-4f9d-a5d0-9279028f63e5\") " pod="openstack/swift-ring-rebalance-lpg42" Nov 24 01:27:45 crc kubenswrapper[4755]: I1124 01:27:45.671713 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-lpg42" Nov 24 01:27:46 crc kubenswrapper[4755]: W1124 01:27:46.098462 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda67b2b8c_3846_4f9d_a5d0_9279028f63e5.slice/crio-2528d3399fcb2a29728cdb89bab76b017840807e546d9d91e5102df0dd3e88ef WatchSource:0}: Error finding container 2528d3399fcb2a29728cdb89bab76b017840807e546d9d91e5102df0dd3e88ef: Status 404 returned error can't find the container with id 2528d3399fcb2a29728cdb89bab76b017840807e546d9d91e5102df0dd3e88ef Nov 24 01:27:46 crc kubenswrapper[4755]: I1124 01:27:46.101226 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-lpg42"] Nov 24 01:27:46 crc kubenswrapper[4755]: I1124 01:27:46.992922 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-lpg42" event={"ID":"a67b2b8c-3846-4f9d-a5d0-9279028f63e5","Type":"ContainerStarted","Data":"2528d3399fcb2a29728cdb89bab76b017840807e546d9d91e5102df0dd3e88ef"} Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.138394 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-5521-account-create-8qtxh"] Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.139997 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-5521-account-create-8qtxh" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.142258 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.145712 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-5521-account-create-8qtxh"] Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.205513 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nf6zz\" (UniqueName: \"kubernetes.io/projected/ab03774c-aec3-4384-953c-4c8cff0ab899-kube-api-access-nf6zz\") pod \"keystone-5521-account-create-8qtxh\" (UID: \"ab03774c-aec3-4384-953c-4c8cff0ab899\") " pod="openstack/keystone-5521-account-create-8qtxh" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.205590 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ab03774c-aec3-4384-953c-4c8cff0ab899-operator-scripts\") pod \"keystone-5521-account-create-8qtxh\" (UID: \"ab03774c-aec3-4384-953c-4c8cff0ab899\") " pod="openstack/keystone-5521-account-create-8qtxh" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.243148 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-4wps5"] Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.244235 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-4wps5" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.263679 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-4wps5"] Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.307115 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nf6zz\" (UniqueName: \"kubernetes.io/projected/ab03774c-aec3-4384-953c-4c8cff0ab899-kube-api-access-nf6zz\") pod \"keystone-5521-account-create-8qtxh\" (UID: \"ab03774c-aec3-4384-953c-4c8cff0ab899\") " pod="openstack/keystone-5521-account-create-8qtxh" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.307524 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9ee50ae2-bf98-4c78-a071-7dd4d45f58dd-operator-scripts\") pod \"keystone-db-create-4wps5\" (UID: \"9ee50ae2-bf98-4c78-a071-7dd4d45f58dd\") " pod="openstack/keystone-db-create-4wps5" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.307573 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ab03774c-aec3-4384-953c-4c8cff0ab899-operator-scripts\") pod \"keystone-5521-account-create-8qtxh\" (UID: \"ab03774c-aec3-4384-953c-4c8cff0ab899\") " pod="openstack/keystone-5521-account-create-8qtxh" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.307784 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-srphp\" (UniqueName: \"kubernetes.io/projected/9ee50ae2-bf98-4c78-a071-7dd4d45f58dd-kube-api-access-srphp\") pod \"keystone-db-create-4wps5\" (UID: \"9ee50ae2-bf98-4c78-a071-7dd4d45f58dd\") " pod="openstack/keystone-db-create-4wps5" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.308517 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ab03774c-aec3-4384-953c-4c8cff0ab899-operator-scripts\") pod \"keystone-5521-account-create-8qtxh\" (UID: \"ab03774c-aec3-4384-953c-4c8cff0ab899\") " pod="openstack/keystone-5521-account-create-8qtxh" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.325588 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nf6zz\" (UniqueName: \"kubernetes.io/projected/ab03774c-aec3-4384-953c-4c8cff0ab899-kube-api-access-nf6zz\") pod \"keystone-5521-account-create-8qtxh\" (UID: \"ab03774c-aec3-4384-953c-4c8cff0ab899\") " pod="openstack/keystone-5521-account-create-8qtxh" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.416685 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9ee50ae2-bf98-4c78-a071-7dd4d45f58dd-operator-scripts\") pod \"keystone-db-create-4wps5\" (UID: \"9ee50ae2-bf98-4c78-a071-7dd4d45f58dd\") " pod="openstack/keystone-db-create-4wps5" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.417016 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-srphp\" (UniqueName: \"kubernetes.io/projected/9ee50ae2-bf98-4c78-a071-7dd4d45f58dd-kube-api-access-srphp\") pod \"keystone-db-create-4wps5\" (UID: \"9ee50ae2-bf98-4c78-a071-7dd4d45f58dd\") " pod="openstack/keystone-db-create-4wps5" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.417589 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9ee50ae2-bf98-4c78-a071-7dd4d45f58dd-operator-scripts\") pod \"keystone-db-create-4wps5\" (UID: \"9ee50ae2-bf98-4c78-a071-7dd4d45f58dd\") " pod="openstack/keystone-db-create-4wps5" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.430335 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-p6ns4"] Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.433334 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-p6ns4" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.437428 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-srphp\" (UniqueName: \"kubernetes.io/projected/9ee50ae2-bf98-4c78-a071-7dd4d45f58dd-kube-api-access-srphp\") pod \"keystone-db-create-4wps5\" (UID: \"9ee50ae2-bf98-4c78-a071-7dd4d45f58dd\") " pod="openstack/keystone-db-create-4wps5" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.443205 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-p6ns4"] Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.450269 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-e4bc-account-create-hcblx"] Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.452723 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-e4bc-account-create-hcblx" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.454504 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.456814 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-e4bc-account-create-hcblx"] Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.472056 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-5521-account-create-8qtxh" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.518532 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2387b3cd-ba87-4b14-b866-3a065b3d451c-operator-scripts\") pod \"placement-db-create-p6ns4\" (UID: \"2387b3cd-ba87-4b14-b866-3a065b3d451c\") " pod="openstack/placement-db-create-p6ns4" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.518586 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4kjvm\" (UniqueName: \"kubernetes.io/projected/550a75dc-d88c-47f3-8c08-2803fb661736-kube-api-access-4kjvm\") pod \"placement-e4bc-account-create-hcblx\" (UID: \"550a75dc-d88c-47f3-8c08-2803fb661736\") " pod="openstack/placement-e4bc-account-create-hcblx" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.518908 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fvbnw\" (UniqueName: \"kubernetes.io/projected/2387b3cd-ba87-4b14-b866-3a065b3d451c-kube-api-access-fvbnw\") pod \"placement-db-create-p6ns4\" (UID: \"2387b3cd-ba87-4b14-b866-3a065b3d451c\") " pod="openstack/placement-db-create-p6ns4" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.519008 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/550a75dc-d88c-47f3-8c08-2803fb661736-operator-scripts\") pod \"placement-e4bc-account-create-hcblx\" (UID: \"550a75dc-d88c-47f3-8c08-2803fb661736\") " pod="openstack/placement-e4bc-account-create-hcblx" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.562362 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-4wps5" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.620581 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fvbnw\" (UniqueName: \"kubernetes.io/projected/2387b3cd-ba87-4b14-b866-3a065b3d451c-kube-api-access-fvbnw\") pod \"placement-db-create-p6ns4\" (UID: \"2387b3cd-ba87-4b14-b866-3a065b3d451c\") " pod="openstack/placement-db-create-p6ns4" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.620940 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/550a75dc-d88c-47f3-8c08-2803fb661736-operator-scripts\") pod \"placement-e4bc-account-create-hcblx\" (UID: \"550a75dc-d88c-47f3-8c08-2803fb661736\") " pod="openstack/placement-e4bc-account-create-hcblx" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.621369 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2387b3cd-ba87-4b14-b866-3a065b3d451c-operator-scripts\") pod \"placement-db-create-p6ns4\" (UID: \"2387b3cd-ba87-4b14-b866-3a065b3d451c\") " pod="openstack/placement-db-create-p6ns4" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.621529 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4kjvm\" (UniqueName: \"kubernetes.io/projected/550a75dc-d88c-47f3-8c08-2803fb661736-kube-api-access-4kjvm\") pod \"placement-e4bc-account-create-hcblx\" (UID: \"550a75dc-d88c-47f3-8c08-2803fb661736\") " pod="openstack/placement-e4bc-account-create-hcblx" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.622420 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/550a75dc-d88c-47f3-8c08-2803fb661736-operator-scripts\") pod \"placement-e4bc-account-create-hcblx\" (UID: \"550a75dc-d88c-47f3-8c08-2803fb661736\") " pod="openstack/placement-e4bc-account-create-hcblx" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.623204 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2387b3cd-ba87-4b14-b866-3a065b3d451c-operator-scripts\") pod \"placement-db-create-p6ns4\" (UID: \"2387b3cd-ba87-4b14-b866-3a065b3d451c\") " pod="openstack/placement-db-create-p6ns4" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.629915 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-lp9ng"] Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.631117 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-lp9ng" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.651127 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4kjvm\" (UniqueName: \"kubernetes.io/projected/550a75dc-d88c-47f3-8c08-2803fb661736-kube-api-access-4kjvm\") pod \"placement-e4bc-account-create-hcblx\" (UID: \"550a75dc-d88c-47f3-8c08-2803fb661736\") " pod="openstack/placement-e4bc-account-create-hcblx" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.661401 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fvbnw\" (UniqueName: \"kubernetes.io/projected/2387b3cd-ba87-4b14-b866-3a065b3d451c-kube-api-access-fvbnw\") pod \"placement-db-create-p6ns4\" (UID: \"2387b3cd-ba87-4b14-b866-3a065b3d451c\") " pod="openstack/placement-db-create-p6ns4" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.668587 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-lp9ng"] Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.725196 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1a8c7a3e-d7af-4cdb-966b-d38e19315024-operator-scripts\") pod \"glance-db-create-lp9ng\" (UID: \"1a8c7a3e-d7af-4cdb-966b-d38e19315024\") " pod="openstack/glance-db-create-lp9ng" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.725317 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h9jjh\" (UniqueName: \"kubernetes.io/projected/1a8c7a3e-d7af-4cdb-966b-d38e19315024-kube-api-access-h9jjh\") pod \"glance-db-create-lp9ng\" (UID: \"1a8c7a3e-d7af-4cdb-966b-d38e19315024\") " pod="openstack/glance-db-create-lp9ng" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.756656 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-0ff2-account-create-vpglm"] Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.758083 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-0ff2-account-create-vpglm" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.760462 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.763504 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-0ff2-account-create-vpglm"] Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.789667 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-p6ns4" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.796668 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-e4bc-account-create-hcblx" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.827118 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h9jjh\" (UniqueName: \"kubernetes.io/projected/1a8c7a3e-d7af-4cdb-966b-d38e19315024-kube-api-access-h9jjh\") pod \"glance-db-create-lp9ng\" (UID: \"1a8c7a3e-d7af-4cdb-966b-d38e19315024\") " pod="openstack/glance-db-create-lp9ng" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.827377 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dm5g8\" (UniqueName: \"kubernetes.io/projected/09e7a2d3-efe2-4e9c-b485-937b4b4a2d38-kube-api-access-dm5g8\") pod \"glance-0ff2-account-create-vpglm\" (UID: \"09e7a2d3-efe2-4e9c-b485-937b4b4a2d38\") " pod="openstack/glance-0ff2-account-create-vpglm" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.827535 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/09e7a2d3-efe2-4e9c-b485-937b4b4a2d38-operator-scripts\") pod \"glance-0ff2-account-create-vpglm\" (UID: \"09e7a2d3-efe2-4e9c-b485-937b4b4a2d38\") " pod="openstack/glance-0ff2-account-create-vpglm" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.827660 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1a8c7a3e-d7af-4cdb-966b-d38e19315024-operator-scripts\") pod \"glance-db-create-lp9ng\" (UID: \"1a8c7a3e-d7af-4cdb-966b-d38e19315024\") " pod="openstack/glance-db-create-lp9ng" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.828733 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1a8c7a3e-d7af-4cdb-966b-d38e19315024-operator-scripts\") pod \"glance-db-create-lp9ng\" (UID: \"1a8c7a3e-d7af-4cdb-966b-d38e19315024\") " pod="openstack/glance-db-create-lp9ng" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.847507 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h9jjh\" (UniqueName: \"kubernetes.io/projected/1a8c7a3e-d7af-4cdb-966b-d38e19315024-kube-api-access-h9jjh\") pod \"glance-db-create-lp9ng\" (UID: \"1a8c7a3e-d7af-4cdb-966b-d38e19315024\") " pod="openstack/glance-db-create-lp9ng" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.935588 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dm5g8\" (UniqueName: \"kubernetes.io/projected/09e7a2d3-efe2-4e9c-b485-937b4b4a2d38-kube-api-access-dm5g8\") pod \"glance-0ff2-account-create-vpglm\" (UID: \"09e7a2d3-efe2-4e9c-b485-937b4b4a2d38\") " pod="openstack/glance-0ff2-account-create-vpglm" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.935764 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/09e7a2d3-efe2-4e9c-b485-937b4b4a2d38-operator-scripts\") pod \"glance-0ff2-account-create-vpglm\" (UID: \"09e7a2d3-efe2-4e9c-b485-937b4b4a2d38\") " pod="openstack/glance-0ff2-account-create-vpglm" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.947184 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/09e7a2d3-efe2-4e9c-b485-937b4b4a2d38-operator-scripts\") pod 
\"glance-0ff2-account-create-vpglm\" (UID: \"09e7a2d3-efe2-4e9c-b485-937b4b4a2d38\") " pod="openstack/glance-0ff2-account-create-vpglm" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.950391 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dm5g8\" (UniqueName: \"kubernetes.io/projected/09e7a2d3-efe2-4e9c-b485-937b4b4a2d38-kube-api-access-dm5g8\") pod \"glance-0ff2-account-create-vpglm\" (UID: \"09e7a2d3-efe2-4e9c-b485-937b4b4a2d38\") " pod="openstack/glance-0ff2-account-create-vpglm" Nov 24 01:27:48 crc kubenswrapper[4755]: I1124 01:27:48.999508 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-lp9ng" Nov 24 01:27:49 crc kubenswrapper[4755]: I1124 01:27:49.084082 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-0ff2-account-create-vpglm" Nov 24 01:27:49 crc kubenswrapper[4755]: I1124 01:27:49.245420 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c3d5f6f4-a502-4cbf-95c6-e85416bcd559-etc-swift\") pod \"swift-storage-0\" (UID: \"c3d5f6f4-a502-4cbf-95c6-e85416bcd559\") " pod="openstack/swift-storage-0" Nov 24 01:27:49 crc kubenswrapper[4755]: E1124 01:27:49.245659 4755 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Nov 24 01:27:49 crc kubenswrapper[4755]: E1124 01:27:49.245675 4755 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Nov 24 01:27:49 crc kubenswrapper[4755]: E1124 01:27:49.245715 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/c3d5f6f4-a502-4cbf-95c6-e85416bcd559-etc-swift podName:c3d5f6f4-a502-4cbf-95c6-e85416bcd559 nodeName:}" failed. No retries permitted until 2025-11-24 01:27:57.245699206 +0000 UTC m=+901.931764707 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/c3d5f6f4-a502-4cbf-95c6-e85416bcd559-etc-swift") pod "swift-storage-0" (UID: "c3d5f6f4-a502-4cbf-95c6-e85416bcd559") : configmap "swift-ring-files" not found Nov 24 01:27:49 crc kubenswrapper[4755]: I1124 01:27:49.919195 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-p6ns4"] Nov 24 01:27:49 crc kubenswrapper[4755]: W1124 01:27:49.924007 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2387b3cd_ba87_4b14_b866_3a065b3d451c.slice/crio-2d7374dc82c7a496bf8a2352c6481a2222f600084ef1072a090a3a1e79ac85c8 WatchSource:0}: Error finding container 2d7374dc82c7a496bf8a2352c6481a2222f600084ef1072a090a3a1e79ac85c8: Status 404 returned error can't find the container with id 2d7374dc82c7a496bf8a2352c6481a2222f600084ef1072a090a3a1e79ac85c8 Nov 24 01:27:49 crc kubenswrapper[4755]: I1124 01:27:49.944844 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-e4bc-account-create-hcblx"] Nov 24 01:27:49 crc kubenswrapper[4755]: W1124 01:27:49.956492 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod550a75dc_d88c_47f3_8c08_2803fb661736.slice/crio-2d182fe42d801cdd01091a45e552bbd9d3288c7d1c6feb8145819b3fb81b5de3 WatchSource:0}: Error finding container 2d182fe42d801cdd01091a45e552bbd9d3288c7d1c6feb8145819b3fb81b5de3: Status 404 returned error can't find the container with id 2d182fe42d801cdd01091a45e552bbd9d3288c7d1c6feb8145819b3fb81b5de3 Nov 24 01:27:50 crc kubenswrapper[4755]: I1124 01:27:50.029705 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-lp9ng"] Nov 24 01:27:50 crc kubenswrapper[4755]: W1124 01:27:50.037478 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1a8c7a3e_d7af_4cdb_966b_d38e19315024.slice/crio-1ac0d8fb2ab88a6e1e10fe57970f9061f647c6760f5bd11c001b6c8d275786b0 WatchSource:0}: Error finding container 1ac0d8fb2ab88a6e1e10fe57970f9061f647c6760f5bd11c001b6c8d275786b0: Status 404 returned error can't find the container with id 1ac0d8fb2ab88a6e1e10fe57970f9061f647c6760f5bd11c001b6c8d275786b0 Nov 24 01:27:50 crc kubenswrapper[4755]: I1124 01:27:50.045276 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-e4bc-account-create-hcblx" event={"ID":"550a75dc-d88c-47f3-8c08-2803fb661736","Type":"ContainerStarted","Data":"2d182fe42d801cdd01091a45e552bbd9d3288c7d1c6feb8145819b3fb81b5de3"} Nov 24 01:27:50 crc kubenswrapper[4755]: I1124 01:27:50.047061 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-lpg42" event={"ID":"a67b2b8c-3846-4f9d-a5d0-9279028f63e5","Type":"ContainerStarted","Data":"91fbfba841c4bb873b5c2e83030b215f85b1096fd0ca8e69a54d18e62bed7ac3"} Nov 24 01:27:50 crc kubenswrapper[4755]: I1124 01:27:50.048245 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-0ff2-account-create-vpglm"] Nov 24 01:27:50 crc kubenswrapper[4755]: I1124 01:27:50.055006 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-p6ns4" event={"ID":"2387b3cd-ba87-4b14-b866-3a065b3d451c","Type":"ContainerStarted","Data":"2d7374dc82c7a496bf8a2352c6481a2222f600084ef1072a090a3a1e79ac85c8"} Nov 24 01:27:50 crc kubenswrapper[4755]: I1124 01:27:50.056722 4755 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-4wps5"] Nov 24 01:27:50 crc kubenswrapper[4755]: I1124 01:27:50.071931 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-5521-account-create-8qtxh"] Nov 24 01:27:50 crc kubenswrapper[4755]: I1124 01:27:50.075216 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-lpg42" podStartSLOduration=1.71019009 podStartE2EDuration="5.075204537s" podCreationTimestamp="2025-11-24 01:27:45 +0000 UTC" firstStartedPulling="2025-11-24 01:27:46.105033212 +0000 UTC m=+890.791098713" lastFinishedPulling="2025-11-24 01:27:49.470047659 +0000 UTC m=+894.156113160" observedRunningTime="2025-11-24 01:27:50.066133152 +0000 UTC m=+894.752198653" watchObservedRunningTime="2025-11-24 01:27:50.075204537 +0000 UTC m=+894.761270038" Nov 24 01:27:50 crc kubenswrapper[4755]: I1124 01:27:50.581269 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-698758b865-7gzpp" Nov 24 01:27:50 crc kubenswrapper[4755]: I1124 01:27:50.640418 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-lh52s"] Nov 24 01:27:50 crc kubenswrapper[4755]: I1124 01:27:50.640704 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57d769cc4f-lh52s" podUID="88fbee01-1663-4ae9-9776-84ad70bfc066" containerName="dnsmasq-dns" containerID="cri-o://e2ac36003be7551d8a1c3b8d27f0442a93be825c126c3d7bace557c46a2f15a9" gracePeriod=10 Nov 24 01:27:51 crc kubenswrapper[4755]: I1124 01:27:51.047867 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-lh52s" Nov 24 01:27:51 crc kubenswrapper[4755]: I1124 01:27:51.099803 4755 generic.go:334] "Generic (PLEG): container finished" podID="1a8c7a3e-d7af-4cdb-966b-d38e19315024" containerID="151bf023041b498e9d302793d2829971b1c2a46c8c9f824f98fb85a964f05d97" exitCode=0 Nov 24 01:27:51 crc kubenswrapper[4755]: I1124 01:27:51.099908 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-lp9ng" event={"ID":"1a8c7a3e-d7af-4cdb-966b-d38e19315024","Type":"ContainerDied","Data":"151bf023041b498e9d302793d2829971b1c2a46c8c9f824f98fb85a964f05d97"} Nov 24 01:27:51 crc kubenswrapper[4755]: I1124 01:27:51.099936 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-lp9ng" event={"ID":"1a8c7a3e-d7af-4cdb-966b-d38e19315024","Type":"ContainerStarted","Data":"1ac0d8fb2ab88a6e1e10fe57970f9061f647c6760f5bd11c001b6c8d275786b0"} Nov 24 01:27:51 crc kubenswrapper[4755]: I1124 01:27:51.103417 4755 generic.go:334] "Generic (PLEG): container finished" podID="9ee50ae2-bf98-4c78-a071-7dd4d45f58dd" containerID="0f376ddc4ce8638d6e40affd73239870fb3b461a106843874a1c8b82b38190e4" exitCode=0 Nov 24 01:27:51 crc kubenswrapper[4755]: I1124 01:27:51.103486 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-4wps5" event={"ID":"9ee50ae2-bf98-4c78-a071-7dd4d45f58dd","Type":"ContainerDied","Data":"0f376ddc4ce8638d6e40affd73239870fb3b461a106843874a1c8b82b38190e4"} Nov 24 01:27:51 crc kubenswrapper[4755]: I1124 01:27:51.103513 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-4wps5" event={"ID":"9ee50ae2-bf98-4c78-a071-7dd4d45f58dd","Type":"ContainerStarted","Data":"f4597ee2ce0e92cc7ca4a29fba6a8d1a2059cc486b52f704f5c91ecb7d84f036"} Nov 24 01:27:51 crc 
kubenswrapper[4755]: I1124 01:27:51.105772 4755 generic.go:334] "Generic (PLEG): container finished" podID="88fbee01-1663-4ae9-9776-84ad70bfc066" containerID="e2ac36003be7551d8a1c3b8d27f0442a93be825c126c3d7bace557c46a2f15a9" exitCode=0 Nov 24 01:27:51 crc kubenswrapper[4755]: I1124 01:27:51.105829 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-lh52s" event={"ID":"88fbee01-1663-4ae9-9776-84ad70bfc066","Type":"ContainerDied","Data":"e2ac36003be7551d8a1c3b8d27f0442a93be825c126c3d7bace557c46a2f15a9"} Nov 24 01:27:51 crc kubenswrapper[4755]: I1124 01:27:51.105853 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-lh52s" event={"ID":"88fbee01-1663-4ae9-9776-84ad70bfc066","Type":"ContainerDied","Data":"21ad5199fa54bd65e0db5e0e84bfc5456fc7edfd7a09c60aac5ecdfc56ff0c00"} Nov 24 01:27:51 crc kubenswrapper[4755]: I1124 01:27:51.105873 4755 scope.go:117] "RemoveContainer" containerID="e2ac36003be7551d8a1c3b8d27f0442a93be825c126c3d7bace557c46a2f15a9" Nov 24 01:27:51 crc kubenswrapper[4755]: I1124 01:27:51.106005 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-lh52s" Nov 24 01:27:51 crc kubenswrapper[4755]: I1124 01:27:51.107657 4755 generic.go:334] "Generic (PLEG): container finished" podID="2387b3cd-ba87-4b14-b866-3a065b3d451c" containerID="7263e34f93e200f35dd09fe136ab328ef993a1d2138b94e18518097dc4c2b4cd" exitCode=0 Nov 24 01:27:51 crc kubenswrapper[4755]: I1124 01:27:51.107707 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-p6ns4" event={"ID":"2387b3cd-ba87-4b14-b866-3a065b3d451c","Type":"ContainerDied","Data":"7263e34f93e200f35dd09fe136ab328ef993a1d2138b94e18518097dc4c2b4cd"} Nov 24 01:27:51 crc kubenswrapper[4755]: I1124 01:27:51.109318 4755 generic.go:334] "Generic (PLEG): container finished" podID="09e7a2d3-efe2-4e9c-b485-937b4b4a2d38" containerID="b0d3f78d72722d216025e67130fce70acfb1909b78af2d8215065933314ab20e" exitCode=0 Nov 24 01:27:51 crc kubenswrapper[4755]: I1124 01:27:51.109356 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-0ff2-account-create-vpglm" event={"ID":"09e7a2d3-efe2-4e9c-b485-937b4b4a2d38","Type":"ContainerDied","Data":"b0d3f78d72722d216025e67130fce70acfb1909b78af2d8215065933314ab20e"} Nov 24 01:27:51 crc kubenswrapper[4755]: I1124 01:27:51.109679 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-0ff2-account-create-vpglm" event={"ID":"09e7a2d3-efe2-4e9c-b485-937b4b4a2d38","Type":"ContainerStarted","Data":"e9113a664b0cd9afeb9d1d6d85059735c0865130e83aa8c99c34747e81e6e7c9"} Nov 24 01:27:51 crc kubenswrapper[4755]: I1124 01:27:51.110789 4755 generic.go:334] "Generic (PLEG): container finished" podID="ab03774c-aec3-4384-953c-4c8cff0ab899" containerID="e1bc3fe47ad11c1a2caba8e59e6511b303318dc164b55c83ec7427dc08afd17f" exitCode=0 Nov 24 01:27:51 crc kubenswrapper[4755]: I1124 01:27:51.110845 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5521-account-create-8qtxh" event={"ID":"ab03774c-aec3-4384-953c-4c8cff0ab899","Type":"ContainerDied","Data":"e1bc3fe47ad11c1a2caba8e59e6511b303318dc164b55c83ec7427dc08afd17f"} Nov 24 01:27:51 crc kubenswrapper[4755]: I1124 01:27:51.110862 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5521-account-create-8qtxh" 
event={"ID":"ab03774c-aec3-4384-953c-4c8cff0ab899","Type":"ContainerStarted","Data":"26245ebd7f8ff016ec2a9f9c8b92d2899668dcf1b0f9c0b6d31f5b4ac46378fb"} Nov 24 01:27:51 crc kubenswrapper[4755]: I1124 01:27:51.117999 4755 generic.go:334] "Generic (PLEG): container finished" podID="550a75dc-d88c-47f3-8c08-2803fb661736" containerID="af9a2fd5ed610ba25c55ec8f7243fe90e22f72f84e9edee48aabaa44099851aa" exitCode=0 Nov 24 01:27:51 crc kubenswrapper[4755]: I1124 01:27:51.118713 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-e4bc-account-create-hcblx" event={"ID":"550a75dc-d88c-47f3-8c08-2803fb661736","Type":"ContainerDied","Data":"af9a2fd5ed610ba25c55ec8f7243fe90e22f72f84e9edee48aabaa44099851aa"} Nov 24 01:27:51 crc kubenswrapper[4755]: I1124 01:27:51.139510 4755 scope.go:117] "RemoveContainer" containerID="58f4f1823db24042f6994b07dd09e54dbefb168323a5330e38fd89ec71f32f8e" Nov 24 01:27:51 crc kubenswrapper[4755]: I1124 01:27:51.163742 4755 scope.go:117] "RemoveContainer" containerID="e2ac36003be7551d8a1c3b8d27f0442a93be825c126c3d7bace557c46a2f15a9" Nov 24 01:27:51 crc kubenswrapper[4755]: E1124 01:27:51.168143 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e2ac36003be7551d8a1c3b8d27f0442a93be825c126c3d7bace557c46a2f15a9\": container with ID starting with e2ac36003be7551d8a1c3b8d27f0442a93be825c126c3d7bace557c46a2f15a9 not found: ID does not exist" containerID="e2ac36003be7551d8a1c3b8d27f0442a93be825c126c3d7bace557c46a2f15a9" Nov 24 01:27:51 crc kubenswrapper[4755]: I1124 01:27:51.168183 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e2ac36003be7551d8a1c3b8d27f0442a93be825c126c3d7bace557c46a2f15a9"} err="failed to get container status \"e2ac36003be7551d8a1c3b8d27f0442a93be825c126c3d7bace557c46a2f15a9\": rpc error: code = NotFound desc = could not find container \"e2ac36003be7551d8a1c3b8d27f0442a93be825c126c3d7bace557c46a2f15a9\": container with ID starting with e2ac36003be7551d8a1c3b8d27f0442a93be825c126c3d7bace557c46a2f15a9 not found: ID does not exist" Nov 24 01:27:51 crc kubenswrapper[4755]: I1124 01:27:51.168205 4755 scope.go:117] "RemoveContainer" containerID="58f4f1823db24042f6994b07dd09e54dbefb168323a5330e38fd89ec71f32f8e" Nov 24 01:27:51 crc kubenswrapper[4755]: E1124 01:27:51.168563 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"58f4f1823db24042f6994b07dd09e54dbefb168323a5330e38fd89ec71f32f8e\": container with ID starting with 58f4f1823db24042f6994b07dd09e54dbefb168323a5330e38fd89ec71f32f8e not found: ID does not exist" containerID="58f4f1823db24042f6994b07dd09e54dbefb168323a5330e38fd89ec71f32f8e" Nov 24 01:27:51 crc kubenswrapper[4755]: I1124 01:27:51.168583 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"58f4f1823db24042f6994b07dd09e54dbefb168323a5330e38fd89ec71f32f8e"} err="failed to get container status \"58f4f1823db24042f6994b07dd09e54dbefb168323a5330e38fd89ec71f32f8e\": rpc error: code = NotFound desc = could not find container \"58f4f1823db24042f6994b07dd09e54dbefb168323a5330e38fd89ec71f32f8e\": container with ID starting with 58f4f1823db24042f6994b07dd09e54dbefb168323a5330e38fd89ec71f32f8e not found: ID does not exist" Nov 24 01:27:51 crc kubenswrapper[4755]: I1124 01:27:51.192825 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jx8pm\" 
(UniqueName: \"kubernetes.io/projected/88fbee01-1663-4ae9-9776-84ad70bfc066-kube-api-access-jx8pm\") pod \"88fbee01-1663-4ae9-9776-84ad70bfc066\" (UID: \"88fbee01-1663-4ae9-9776-84ad70bfc066\") " Nov 24 01:27:51 crc kubenswrapper[4755]: I1124 01:27:51.192969 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/88fbee01-1663-4ae9-9776-84ad70bfc066-dns-svc\") pod \"88fbee01-1663-4ae9-9776-84ad70bfc066\" (UID: \"88fbee01-1663-4ae9-9776-84ad70bfc066\") " Nov 24 01:27:51 crc kubenswrapper[4755]: I1124 01:27:51.193000 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88fbee01-1663-4ae9-9776-84ad70bfc066-config\") pod \"88fbee01-1663-4ae9-9776-84ad70bfc066\" (UID: \"88fbee01-1663-4ae9-9776-84ad70bfc066\") " Nov 24 01:27:51 crc kubenswrapper[4755]: I1124 01:27:51.198814 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/88fbee01-1663-4ae9-9776-84ad70bfc066-kube-api-access-jx8pm" (OuterVolumeSpecName: "kube-api-access-jx8pm") pod "88fbee01-1663-4ae9-9776-84ad70bfc066" (UID: "88fbee01-1663-4ae9-9776-84ad70bfc066"). InnerVolumeSpecName "kube-api-access-jx8pm". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:27:51 crc kubenswrapper[4755]: I1124 01:27:51.231738 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/88fbee01-1663-4ae9-9776-84ad70bfc066-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "88fbee01-1663-4ae9-9776-84ad70bfc066" (UID: "88fbee01-1663-4ae9-9776-84ad70bfc066"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:27:51 crc kubenswrapper[4755]: I1124 01:27:51.233173 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/88fbee01-1663-4ae9-9776-84ad70bfc066-config" (OuterVolumeSpecName: "config") pod "88fbee01-1663-4ae9-9776-84ad70bfc066" (UID: "88fbee01-1663-4ae9-9776-84ad70bfc066"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:27:51 crc kubenswrapper[4755]: I1124 01:27:51.294992 4755 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/88fbee01-1663-4ae9-9776-84ad70bfc066-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 24 01:27:51 crc kubenswrapper[4755]: I1124 01:27:51.295026 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88fbee01-1663-4ae9-9776-84ad70bfc066-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:27:51 crc kubenswrapper[4755]: I1124 01:27:51.295036 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jx8pm\" (UniqueName: \"kubernetes.io/projected/88fbee01-1663-4ae9-9776-84ad70bfc066-kube-api-access-jx8pm\") on node \"crc\" DevicePath \"\"" Nov 24 01:27:51 crc kubenswrapper[4755]: I1124 01:27:51.443776 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-lh52s"] Nov 24 01:27:51 crc kubenswrapper[4755]: I1124 01:27:51.443852 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-lh52s"] Nov 24 01:27:51 crc kubenswrapper[4755]: I1124 01:27:51.983751 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Nov 24 01:27:52 crc kubenswrapper[4755]: I1124 01:27:52.007194 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="88fbee01-1663-4ae9-9776-84ad70bfc066" path="/var/lib/kubelet/pods/88fbee01-1663-4ae9-9776-84ad70bfc066/volumes" Nov 24 01:27:52 crc kubenswrapper[4755]: I1124 01:27:52.531727 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-0ff2-account-create-vpglm" Nov 24 01:27:52 crc kubenswrapper[4755]: I1124 01:27:52.618255 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/09e7a2d3-efe2-4e9c-b485-937b4b4a2d38-operator-scripts\") pod \"09e7a2d3-efe2-4e9c-b485-937b4b4a2d38\" (UID: \"09e7a2d3-efe2-4e9c-b485-937b4b4a2d38\") " Nov 24 01:27:52 crc kubenswrapper[4755]: I1124 01:27:52.618438 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dm5g8\" (UniqueName: \"kubernetes.io/projected/09e7a2d3-efe2-4e9c-b485-937b4b4a2d38-kube-api-access-dm5g8\") pod \"09e7a2d3-efe2-4e9c-b485-937b4b4a2d38\" (UID: \"09e7a2d3-efe2-4e9c-b485-937b4b4a2d38\") " Nov 24 01:27:52 crc kubenswrapper[4755]: I1124 01:27:52.619453 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09e7a2d3-efe2-4e9c-b485-937b4b4a2d38-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "09e7a2d3-efe2-4e9c-b485-937b4b4a2d38" (UID: "09e7a2d3-efe2-4e9c-b485-937b4b4a2d38"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:27:52 crc kubenswrapper[4755]: I1124 01:27:52.626163 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09e7a2d3-efe2-4e9c-b485-937b4b4a2d38-kube-api-access-dm5g8" (OuterVolumeSpecName: "kube-api-access-dm5g8") pod "09e7a2d3-efe2-4e9c-b485-937b4b4a2d38" (UID: "09e7a2d3-efe2-4e9c-b485-937b4b4a2d38"). InnerVolumeSpecName "kube-api-access-dm5g8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:27:52 crc kubenswrapper[4755]: I1124 01:27:52.699842 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-e4bc-account-create-hcblx" Nov 24 01:27:52 crc kubenswrapper[4755]: I1124 01:27:52.710816 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-5521-account-create-8qtxh" Nov 24 01:27:52 crc kubenswrapper[4755]: I1124 01:27:52.718136 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-4wps5" Nov 24 01:27:52 crc kubenswrapper[4755]: I1124 01:27:52.720319 4755 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/09e7a2d3-efe2-4e9c-b485-937b4b4a2d38-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 01:27:52 crc kubenswrapper[4755]: I1124 01:27:52.720347 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dm5g8\" (UniqueName: \"kubernetes.io/projected/09e7a2d3-efe2-4e9c-b485-937b4b4a2d38-kube-api-access-dm5g8\") on node \"crc\" DevicePath \"\"" Nov 24 01:27:52 crc kubenswrapper[4755]: I1124 01:27:52.740923 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-lp9ng" Nov 24 01:27:52 crc kubenswrapper[4755]: I1124 01:27:52.746406 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-p6ns4" Nov 24 01:27:52 crc kubenswrapper[4755]: I1124 01:27:52.821540 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9ee50ae2-bf98-4c78-a071-7dd4d45f58dd-operator-scripts\") pod \"9ee50ae2-bf98-4c78-a071-7dd4d45f58dd\" (UID: \"9ee50ae2-bf98-4c78-a071-7dd4d45f58dd\") " Nov 24 01:27:52 crc kubenswrapper[4755]: I1124 01:27:52.821616 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4kjvm\" (UniqueName: \"kubernetes.io/projected/550a75dc-d88c-47f3-8c08-2803fb661736-kube-api-access-4kjvm\") pod \"550a75dc-d88c-47f3-8c08-2803fb661736\" (UID: \"550a75dc-d88c-47f3-8c08-2803fb661736\") " Nov 24 01:27:52 crc kubenswrapper[4755]: I1124 01:27:52.821660 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ab03774c-aec3-4384-953c-4c8cff0ab899-operator-scripts\") pod \"ab03774c-aec3-4384-953c-4c8cff0ab899\" (UID: \"ab03774c-aec3-4384-953c-4c8cff0ab899\") " Nov 24 01:27:52 crc kubenswrapper[4755]: I1124 01:27:52.821678 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h9jjh\" (UniqueName: \"kubernetes.io/projected/1a8c7a3e-d7af-4cdb-966b-d38e19315024-kube-api-access-h9jjh\") pod \"1a8c7a3e-d7af-4cdb-966b-d38e19315024\" (UID: \"1a8c7a3e-d7af-4cdb-966b-d38e19315024\") " Nov 24 01:27:52 crc kubenswrapper[4755]: I1124 01:27:52.821707 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nf6zz\" (UniqueName: \"kubernetes.io/projected/ab03774c-aec3-4384-953c-4c8cff0ab899-kube-api-access-nf6zz\") pod \"ab03774c-aec3-4384-953c-4c8cff0ab899\" (UID: \"ab03774c-aec3-4384-953c-4c8cff0ab899\") " Nov 24 01:27:52 crc kubenswrapper[4755]: I1124 01:27:52.821761 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fvbnw\" (UniqueName: \"kubernetes.io/projected/2387b3cd-ba87-4b14-b866-3a065b3d451c-kube-api-access-fvbnw\") pod \"2387b3cd-ba87-4b14-b866-3a065b3d451c\" (UID: 
\"2387b3cd-ba87-4b14-b866-3a065b3d451c\") " Nov 24 01:27:52 crc kubenswrapper[4755]: I1124 01:27:52.821806 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2387b3cd-ba87-4b14-b866-3a065b3d451c-operator-scripts\") pod \"2387b3cd-ba87-4b14-b866-3a065b3d451c\" (UID: \"2387b3cd-ba87-4b14-b866-3a065b3d451c\") " Nov 24 01:27:52 crc kubenswrapper[4755]: I1124 01:27:52.821831 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1a8c7a3e-d7af-4cdb-966b-d38e19315024-operator-scripts\") pod \"1a8c7a3e-d7af-4cdb-966b-d38e19315024\" (UID: \"1a8c7a3e-d7af-4cdb-966b-d38e19315024\") " Nov 24 01:27:52 crc kubenswrapper[4755]: I1124 01:27:52.821856 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/550a75dc-d88c-47f3-8c08-2803fb661736-operator-scripts\") pod \"550a75dc-d88c-47f3-8c08-2803fb661736\" (UID: \"550a75dc-d88c-47f3-8c08-2803fb661736\") " Nov 24 01:27:52 crc kubenswrapper[4755]: I1124 01:27:52.821921 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-srphp\" (UniqueName: \"kubernetes.io/projected/9ee50ae2-bf98-4c78-a071-7dd4d45f58dd-kube-api-access-srphp\") pod \"9ee50ae2-bf98-4c78-a071-7dd4d45f58dd\" (UID: \"9ee50ae2-bf98-4c78-a071-7dd4d45f58dd\") " Nov 24 01:27:52 crc kubenswrapper[4755]: I1124 01:27:52.822720 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9ee50ae2-bf98-4c78-a071-7dd4d45f58dd-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9ee50ae2-bf98-4c78-a071-7dd4d45f58dd" (UID: "9ee50ae2-bf98-4c78-a071-7dd4d45f58dd"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:27:52 crc kubenswrapper[4755]: I1124 01:27:52.823046 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/550a75dc-d88c-47f3-8c08-2803fb661736-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "550a75dc-d88c-47f3-8c08-2803fb661736" (UID: "550a75dc-d88c-47f3-8c08-2803fb661736"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:27:52 crc kubenswrapper[4755]: I1124 01:27:52.823117 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2387b3cd-ba87-4b14-b866-3a065b3d451c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2387b3cd-ba87-4b14-b866-3a065b3d451c" (UID: "2387b3cd-ba87-4b14-b866-3a065b3d451c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:27:52 crc kubenswrapper[4755]: I1124 01:27:52.823220 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab03774c-aec3-4384-953c-4c8cff0ab899-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ab03774c-aec3-4384-953c-4c8cff0ab899" (UID: "ab03774c-aec3-4384-953c-4c8cff0ab899"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:27:52 crc kubenswrapper[4755]: I1124 01:27:52.823709 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a8c7a3e-d7af-4cdb-966b-d38e19315024-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1a8c7a3e-d7af-4cdb-966b-d38e19315024" (UID: "1a8c7a3e-d7af-4cdb-966b-d38e19315024"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:27:52 crc kubenswrapper[4755]: I1124 01:27:52.824963 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2387b3cd-ba87-4b14-b866-3a065b3d451c-kube-api-access-fvbnw" (OuterVolumeSpecName: "kube-api-access-fvbnw") pod "2387b3cd-ba87-4b14-b866-3a065b3d451c" (UID: "2387b3cd-ba87-4b14-b866-3a065b3d451c"). InnerVolumeSpecName "kube-api-access-fvbnw". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:27:52 crc kubenswrapper[4755]: I1124 01:27:52.825794 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab03774c-aec3-4384-953c-4c8cff0ab899-kube-api-access-nf6zz" (OuterVolumeSpecName: "kube-api-access-nf6zz") pod "ab03774c-aec3-4384-953c-4c8cff0ab899" (UID: "ab03774c-aec3-4384-953c-4c8cff0ab899"). InnerVolumeSpecName "kube-api-access-nf6zz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:27:52 crc kubenswrapper[4755]: I1124 01:27:52.825853 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ee50ae2-bf98-4c78-a071-7dd4d45f58dd-kube-api-access-srphp" (OuterVolumeSpecName: "kube-api-access-srphp") pod "9ee50ae2-bf98-4c78-a071-7dd4d45f58dd" (UID: "9ee50ae2-bf98-4c78-a071-7dd4d45f58dd"). InnerVolumeSpecName "kube-api-access-srphp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:27:52 crc kubenswrapper[4755]: I1124 01:27:52.827274 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a8c7a3e-d7af-4cdb-966b-d38e19315024-kube-api-access-h9jjh" (OuterVolumeSpecName: "kube-api-access-h9jjh") pod "1a8c7a3e-d7af-4cdb-966b-d38e19315024" (UID: "1a8c7a3e-d7af-4cdb-966b-d38e19315024"). InnerVolumeSpecName "kube-api-access-h9jjh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:27:52 crc kubenswrapper[4755]: I1124 01:27:52.828047 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/550a75dc-d88c-47f3-8c08-2803fb661736-kube-api-access-4kjvm" (OuterVolumeSpecName: "kube-api-access-4kjvm") pod "550a75dc-d88c-47f3-8c08-2803fb661736" (UID: "550a75dc-d88c-47f3-8c08-2803fb661736"). InnerVolumeSpecName "kube-api-access-4kjvm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:27:52 crc kubenswrapper[4755]: I1124 01:27:52.923691 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-srphp\" (UniqueName: \"kubernetes.io/projected/9ee50ae2-bf98-4c78-a071-7dd4d45f58dd-kube-api-access-srphp\") on node \"crc\" DevicePath \"\"" Nov 24 01:27:52 crc kubenswrapper[4755]: I1124 01:27:52.923723 4755 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9ee50ae2-bf98-4c78-a071-7dd4d45f58dd-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 01:27:52 crc kubenswrapper[4755]: I1124 01:27:52.923733 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4kjvm\" (UniqueName: \"kubernetes.io/projected/550a75dc-d88c-47f3-8c08-2803fb661736-kube-api-access-4kjvm\") on node \"crc\" DevicePath \"\"" Nov 24 01:27:52 crc kubenswrapper[4755]: I1124 01:27:52.923741 4755 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ab03774c-aec3-4384-953c-4c8cff0ab899-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 01:27:52 crc kubenswrapper[4755]: I1124 01:27:52.923751 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h9jjh\" (UniqueName: \"kubernetes.io/projected/1a8c7a3e-d7af-4cdb-966b-d38e19315024-kube-api-access-h9jjh\") on node \"crc\" DevicePath \"\"" Nov 24 01:27:52 crc kubenswrapper[4755]: I1124 01:27:52.923761 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nf6zz\" (UniqueName: \"kubernetes.io/projected/ab03774c-aec3-4384-953c-4c8cff0ab899-kube-api-access-nf6zz\") on node \"crc\" DevicePath \"\"" Nov 24 01:27:52 crc kubenswrapper[4755]: I1124 01:27:52.923772 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fvbnw\" (UniqueName: \"kubernetes.io/projected/2387b3cd-ba87-4b14-b866-3a065b3d451c-kube-api-access-fvbnw\") on node \"crc\" DevicePath \"\"" Nov 24 01:27:52 crc kubenswrapper[4755]: I1124 01:27:52.923781 4755 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2387b3cd-ba87-4b14-b866-3a065b3d451c-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 01:27:52 crc kubenswrapper[4755]: I1124 01:27:52.923789 4755 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1a8c7a3e-d7af-4cdb-966b-d38e19315024-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 01:27:52 crc kubenswrapper[4755]: I1124 01:27:52.923797 4755 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/550a75dc-d88c-47f3-8c08-2803fb661736-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 01:27:53 crc kubenswrapper[4755]: I1124 01:27:53.135794 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-0ff2-account-create-vpglm" Nov 24 01:27:53 crc kubenswrapper[4755]: I1124 01:27:53.135791 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-0ff2-account-create-vpglm" event={"ID":"09e7a2d3-efe2-4e9c-b485-937b4b4a2d38","Type":"ContainerDied","Data":"e9113a664b0cd9afeb9d1d6d85059735c0865130e83aa8c99c34747e81e6e7c9"} Nov 24 01:27:53 crc kubenswrapper[4755]: I1124 01:27:53.135863 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e9113a664b0cd9afeb9d1d6d85059735c0865130e83aa8c99c34747e81e6e7c9" Nov 24 01:27:53 crc kubenswrapper[4755]: I1124 01:27:53.137152 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5521-account-create-8qtxh" event={"ID":"ab03774c-aec3-4384-953c-4c8cff0ab899","Type":"ContainerDied","Data":"26245ebd7f8ff016ec2a9f9c8b92d2899668dcf1b0f9c0b6d31f5b4ac46378fb"} Nov 24 01:27:53 crc kubenswrapper[4755]: I1124 01:27:53.137183 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-5521-account-create-8qtxh" Nov 24 01:27:53 crc kubenswrapper[4755]: I1124 01:27:53.137190 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="26245ebd7f8ff016ec2a9f9c8b92d2899668dcf1b0f9c0b6d31f5b4ac46378fb" Nov 24 01:27:53 crc kubenswrapper[4755]: I1124 01:27:53.138420 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-e4bc-account-create-hcblx" event={"ID":"550a75dc-d88c-47f3-8c08-2803fb661736","Type":"ContainerDied","Data":"2d182fe42d801cdd01091a45e552bbd9d3288c7d1c6feb8145819b3fb81b5de3"} Nov 24 01:27:53 crc kubenswrapper[4755]: I1124 01:27:53.138446 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2d182fe42d801cdd01091a45e552bbd9d3288c7d1c6feb8145819b3fb81b5de3" Nov 24 01:27:53 crc kubenswrapper[4755]: I1124 01:27:53.138510 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-e4bc-account-create-hcblx" Nov 24 01:27:53 crc kubenswrapper[4755]: I1124 01:27:53.147501 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-lp9ng" event={"ID":"1a8c7a3e-d7af-4cdb-966b-d38e19315024","Type":"ContainerDied","Data":"1ac0d8fb2ab88a6e1e10fe57970f9061f647c6760f5bd11c001b6c8d275786b0"} Nov 24 01:27:53 crc kubenswrapper[4755]: I1124 01:27:53.147548 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1ac0d8fb2ab88a6e1e10fe57970f9061f647c6760f5bd11c001b6c8d275786b0" Nov 24 01:27:53 crc kubenswrapper[4755]: I1124 01:27:53.147627 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-lp9ng" Nov 24 01:27:53 crc kubenswrapper[4755]: I1124 01:27:53.149839 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-4wps5" event={"ID":"9ee50ae2-bf98-4c78-a071-7dd4d45f58dd","Type":"ContainerDied","Data":"f4597ee2ce0e92cc7ca4a29fba6a8d1a2059cc486b52f704f5c91ecb7d84f036"} Nov 24 01:27:53 crc kubenswrapper[4755]: I1124 01:27:53.149883 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f4597ee2ce0e92cc7ca4a29fba6a8d1a2059cc486b52f704f5c91ecb7d84f036" Nov 24 01:27:53 crc kubenswrapper[4755]: I1124 01:27:53.149958 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-4wps5" Nov 24 01:27:53 crc kubenswrapper[4755]: I1124 01:27:53.155024 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-p6ns4" event={"ID":"2387b3cd-ba87-4b14-b866-3a065b3d451c","Type":"ContainerDied","Data":"2d7374dc82c7a496bf8a2352c6481a2222f600084ef1072a090a3a1e79ac85c8"} Nov 24 01:27:53 crc kubenswrapper[4755]: I1124 01:27:53.155086 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2d7374dc82c7a496bf8a2352c6481a2222f600084ef1072a090a3a1e79ac85c8" Nov 24 01:27:53 crc kubenswrapper[4755]: I1124 01:27:53.155126 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-p6ns4" Nov 24 01:27:57 crc kubenswrapper[4755]: I1124 01:27:57.195323 4755 generic.go:334] "Generic (PLEG): container finished" podID="a67b2b8c-3846-4f9d-a5d0-9279028f63e5" containerID="91fbfba841c4bb873b5c2e83030b215f85b1096fd0ca8e69a54d18e62bed7ac3" exitCode=0 Nov 24 01:27:57 crc kubenswrapper[4755]: I1124 01:27:57.195425 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-lpg42" event={"ID":"a67b2b8c-3846-4f9d-a5d0-9279028f63e5","Type":"ContainerDied","Data":"91fbfba841c4bb873b5c2e83030b215f85b1096fd0ca8e69a54d18e62bed7ac3"} Nov 24 01:27:57 crc kubenswrapper[4755]: I1124 01:27:57.310906 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c3d5f6f4-a502-4cbf-95c6-e85416bcd559-etc-swift\") pod \"swift-storage-0\" (UID: \"c3d5f6f4-a502-4cbf-95c6-e85416bcd559\") " pod="openstack/swift-storage-0" Nov 24 01:27:57 crc kubenswrapper[4755]: I1124 01:27:57.319243 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c3d5f6f4-a502-4cbf-95c6-e85416bcd559-etc-swift\") pod \"swift-storage-0\" (UID: \"c3d5f6f4-a502-4cbf-95c6-e85416bcd559\") " pod="openstack/swift-storage-0" Nov 24 01:27:57 crc kubenswrapper[4755]: I1124 01:27:57.330035 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Nov 24 01:27:57 crc kubenswrapper[4755]: I1124 01:27:57.964343 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Nov 24 01:27:57 crc kubenswrapper[4755]: W1124 01:27:57.965638 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc3d5f6f4_a502_4cbf_95c6_e85416bcd559.slice/crio-c4e61611f3694c27d17e5c37ee1c49ce92194d9f1a4a1deb2d5e953549b7b3f1 WatchSource:0}: Error finding container c4e61611f3694c27d17e5c37ee1c49ce92194d9f1a4a1deb2d5e953549b7b3f1: Status 404 returned error can't find the container with id c4e61611f3694c27d17e5c37ee1c49ce92194d9f1a4a1deb2d5e953549b7b3f1 Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.205565 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"c3d5f6f4-a502-4cbf-95c6-e85416bcd559","Type":"ContainerStarted","Data":"c4e61611f3694c27d17e5c37ee1c49ce92194d9f1a4a1deb2d5e953549b7b3f1"} Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.542113 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-lpg42" Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.634898 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a67b2b8c-3846-4f9d-a5d0-9279028f63e5-dispersionconf\") pod \"a67b2b8c-3846-4f9d-a5d0-9279028f63e5\" (UID: \"a67b2b8c-3846-4f9d-a5d0-9279028f63e5\") " Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.634991 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a67b2b8c-3846-4f9d-a5d0-9279028f63e5-scripts\") pod \"a67b2b8c-3846-4f9d-a5d0-9279028f63e5\" (UID: \"a67b2b8c-3846-4f9d-a5d0-9279028f63e5\") " Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.635030 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a67b2b8c-3846-4f9d-a5d0-9279028f63e5-swiftconf\") pod \"a67b2b8c-3846-4f9d-a5d0-9279028f63e5\" (UID: \"a67b2b8c-3846-4f9d-a5d0-9279028f63e5\") " Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.635057 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c4d56\" (UniqueName: \"kubernetes.io/projected/a67b2b8c-3846-4f9d-a5d0-9279028f63e5-kube-api-access-c4d56\") pod \"a67b2b8c-3846-4f9d-a5d0-9279028f63e5\" (UID: \"a67b2b8c-3846-4f9d-a5d0-9279028f63e5\") " Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.635154 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a67b2b8c-3846-4f9d-a5d0-9279028f63e5-combined-ca-bundle\") pod \"a67b2b8c-3846-4f9d-a5d0-9279028f63e5\" (UID: \"a67b2b8c-3846-4f9d-a5d0-9279028f63e5\") " Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.635181 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/a67b2b8c-3846-4f9d-a5d0-9279028f63e5-etc-swift\") pod \"a67b2b8c-3846-4f9d-a5d0-9279028f63e5\" (UID: \"a67b2b8c-3846-4f9d-a5d0-9279028f63e5\") " Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.635219 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a67b2b8c-3846-4f9d-a5d0-9279028f63e5-ring-data-devices\") pod \"a67b2b8c-3846-4f9d-a5d0-9279028f63e5\" (UID: \"a67b2b8c-3846-4f9d-a5d0-9279028f63e5\") " Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.636257 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a67b2b8c-3846-4f9d-a5d0-9279028f63e5-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "a67b2b8c-3846-4f9d-a5d0-9279028f63e5" (UID: "a67b2b8c-3846-4f9d-a5d0-9279028f63e5"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.636776 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a67b2b8c-3846-4f9d-a5d0-9279028f63e5-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "a67b2b8c-3846-4f9d-a5d0-9279028f63e5" (UID: "a67b2b8c-3846-4f9d-a5d0-9279028f63e5"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.642516 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a67b2b8c-3846-4f9d-a5d0-9279028f63e5-kube-api-access-c4d56" (OuterVolumeSpecName: "kube-api-access-c4d56") pod "a67b2b8c-3846-4f9d-a5d0-9279028f63e5" (UID: "a67b2b8c-3846-4f9d-a5d0-9279028f63e5"). InnerVolumeSpecName "kube-api-access-c4d56". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.645779 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a67b2b8c-3846-4f9d-a5d0-9279028f63e5-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "a67b2b8c-3846-4f9d-a5d0-9279028f63e5" (UID: "a67b2b8c-3846-4f9d-a5d0-9279028f63e5"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.661305 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a67b2b8c-3846-4f9d-a5d0-9279028f63e5-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "a67b2b8c-3846-4f9d-a5d0-9279028f63e5" (UID: "a67b2b8c-3846-4f9d-a5d0-9279028f63e5"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.666393 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a67b2b8c-3846-4f9d-a5d0-9279028f63e5-scripts" (OuterVolumeSpecName: "scripts") pod "a67b2b8c-3846-4f9d-a5d0-9279028f63e5" (UID: "a67b2b8c-3846-4f9d-a5d0-9279028f63e5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.669528 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a67b2b8c-3846-4f9d-a5d0-9279028f63e5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a67b2b8c-3846-4f9d-a5d0-9279028f63e5" (UID: "a67b2b8c-3846-4f9d-a5d0-9279028f63e5"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.744847 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a67b2b8c-3846-4f9d-a5d0-9279028f63e5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.744893 4755 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/a67b2b8c-3846-4f9d-a5d0-9279028f63e5-etc-swift\") on node \"crc\" DevicePath \"\"" Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.744903 4755 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a67b2b8c-3846-4f9d-a5d0-9279028f63e5-ring-data-devices\") on node \"crc\" DevicePath \"\"" Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.744911 4755 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a67b2b8c-3846-4f9d-a5d0-9279028f63e5-dispersionconf\") on node \"crc\" DevicePath \"\"" Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.744955 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a67b2b8c-3846-4f9d-a5d0-9279028f63e5-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.744963 4755 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a67b2b8c-3846-4f9d-a5d0-9279028f63e5-swiftconf\") on node \"crc\" DevicePath \"\"" Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.744971 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c4d56\" (UniqueName: \"kubernetes.io/projected/a67b2b8c-3846-4f9d-a5d0-9279028f63e5-kube-api-access-c4d56\") on node \"crc\" DevicePath \"\"" Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.796052 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-vtf8f"] Nov 24 01:27:58 crc kubenswrapper[4755]: E1124 01:27:58.796415 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2387b3cd-ba87-4b14-b866-3a065b3d451c" containerName="mariadb-database-create" Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.796433 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="2387b3cd-ba87-4b14-b866-3a065b3d451c" containerName="mariadb-database-create" Nov 24 01:27:58 crc kubenswrapper[4755]: E1124 01:27:58.796447 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88fbee01-1663-4ae9-9776-84ad70bfc066" containerName="dnsmasq-dns" Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.796454 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="88fbee01-1663-4ae9-9776-84ad70bfc066" containerName="dnsmasq-dns" Nov 24 01:27:58 crc kubenswrapper[4755]: E1124 01:27:58.796467 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ee50ae2-bf98-4c78-a071-7dd4d45f58dd" containerName="mariadb-database-create" Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.796476 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ee50ae2-bf98-4c78-a071-7dd4d45f58dd" containerName="mariadb-database-create" Nov 24 01:27:58 crc kubenswrapper[4755]: E1124 01:27:58.796485 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a8c7a3e-d7af-4cdb-966b-d38e19315024" containerName="mariadb-database-create" Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 
01:27:58.796491 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a8c7a3e-d7af-4cdb-966b-d38e19315024" containerName="mariadb-database-create" Nov 24 01:27:58 crc kubenswrapper[4755]: E1124 01:27:58.796498 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab03774c-aec3-4384-953c-4c8cff0ab899" containerName="mariadb-account-create" Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.796504 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab03774c-aec3-4384-953c-4c8cff0ab899" containerName="mariadb-account-create" Nov 24 01:27:58 crc kubenswrapper[4755]: E1124 01:27:58.796510 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09e7a2d3-efe2-4e9c-b485-937b4b4a2d38" containerName="mariadb-account-create" Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.796516 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="09e7a2d3-efe2-4e9c-b485-937b4b4a2d38" containerName="mariadb-account-create" Nov 24 01:27:58 crc kubenswrapper[4755]: E1124 01:27:58.796526 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="550a75dc-d88c-47f3-8c08-2803fb661736" containerName="mariadb-account-create" Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.796532 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="550a75dc-d88c-47f3-8c08-2803fb661736" containerName="mariadb-account-create" Nov 24 01:27:58 crc kubenswrapper[4755]: E1124 01:27:58.796550 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88fbee01-1663-4ae9-9776-84ad70bfc066" containerName="init" Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.796557 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="88fbee01-1663-4ae9-9776-84ad70bfc066" containerName="init" Nov 24 01:27:58 crc kubenswrapper[4755]: E1124 01:27:58.796572 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a67b2b8c-3846-4f9d-a5d0-9279028f63e5" containerName="swift-ring-rebalance" Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.796581 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="a67b2b8c-3846-4f9d-a5d0-9279028f63e5" containerName="swift-ring-rebalance" Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.796748 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="a67b2b8c-3846-4f9d-a5d0-9279028f63e5" containerName="swift-ring-rebalance" Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.796766 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="2387b3cd-ba87-4b14-b866-3a065b3d451c" containerName="mariadb-database-create" Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.796775 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="550a75dc-d88c-47f3-8c08-2803fb661736" containerName="mariadb-account-create" Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.796786 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="88fbee01-1663-4ae9-9776-84ad70bfc066" containerName="dnsmasq-dns" Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.796793 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a8c7a3e-d7af-4cdb-966b-d38e19315024" containerName="mariadb-database-create" Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.796800 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="09e7a2d3-efe2-4e9c-b485-937b4b4a2d38" containerName="mariadb-account-create" Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.796812 4755 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="ab03774c-aec3-4384-953c-4c8cff0ab899" containerName="mariadb-account-create" Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.796827 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ee50ae2-bf98-4c78-a071-7dd4d45f58dd" containerName="mariadb-database-create" Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.797342 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-vtf8f" Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.799575 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.799763 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-7g7gm" Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.803685 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-vtf8f"] Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.947976 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dee4f3a-cb77-4137-b459-9c1be1a005ef-combined-ca-bundle\") pod \"glance-db-sync-vtf8f\" (UID: \"6dee4f3a-cb77-4137-b459-9c1be1a005ef\") " pod="openstack/glance-db-sync-vtf8f" Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.948058 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6dee4f3a-cb77-4137-b459-9c1be1a005ef-config-data\") pod \"glance-db-sync-vtf8f\" (UID: \"6dee4f3a-cb77-4137-b459-9c1be1a005ef\") " pod="openstack/glance-db-sync-vtf8f" Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.948219 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hjk7d\" (UniqueName: \"kubernetes.io/projected/6dee4f3a-cb77-4137-b459-9c1be1a005ef-kube-api-access-hjk7d\") pod \"glance-db-sync-vtf8f\" (UID: \"6dee4f3a-cb77-4137-b459-9c1be1a005ef\") " pod="openstack/glance-db-sync-vtf8f" Nov 24 01:27:58 crc kubenswrapper[4755]: I1124 01:27:58.948374 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6dee4f3a-cb77-4137-b459-9c1be1a005ef-db-sync-config-data\") pod \"glance-db-sync-vtf8f\" (UID: \"6dee4f3a-cb77-4137-b459-9c1be1a005ef\") " pod="openstack/glance-db-sync-vtf8f" Nov 24 01:27:59 crc kubenswrapper[4755]: I1124 01:27:59.050270 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hjk7d\" (UniqueName: \"kubernetes.io/projected/6dee4f3a-cb77-4137-b459-9c1be1a005ef-kube-api-access-hjk7d\") pod \"glance-db-sync-vtf8f\" (UID: \"6dee4f3a-cb77-4137-b459-9c1be1a005ef\") " pod="openstack/glance-db-sync-vtf8f" Nov 24 01:27:59 crc kubenswrapper[4755]: I1124 01:27:59.050353 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6dee4f3a-cb77-4137-b459-9c1be1a005ef-db-sync-config-data\") pod \"glance-db-sync-vtf8f\" (UID: \"6dee4f3a-cb77-4137-b459-9c1be1a005ef\") " pod="openstack/glance-db-sync-vtf8f" Nov 24 01:27:59 crc kubenswrapper[4755]: I1124 01:27:59.050411 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/6dee4f3a-cb77-4137-b459-9c1be1a005ef-combined-ca-bundle\") pod \"glance-db-sync-vtf8f\" (UID: \"6dee4f3a-cb77-4137-b459-9c1be1a005ef\") " pod="openstack/glance-db-sync-vtf8f" Nov 24 01:27:59 crc kubenswrapper[4755]: I1124 01:27:59.050458 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6dee4f3a-cb77-4137-b459-9c1be1a005ef-config-data\") pod \"glance-db-sync-vtf8f\" (UID: \"6dee4f3a-cb77-4137-b459-9c1be1a005ef\") " pod="openstack/glance-db-sync-vtf8f" Nov 24 01:27:59 crc kubenswrapper[4755]: I1124 01:27:59.055017 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6dee4f3a-cb77-4137-b459-9c1be1a005ef-config-data\") pod \"glance-db-sync-vtf8f\" (UID: \"6dee4f3a-cb77-4137-b459-9c1be1a005ef\") " pod="openstack/glance-db-sync-vtf8f" Nov 24 01:27:59 crc kubenswrapper[4755]: I1124 01:27:59.055137 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dee4f3a-cb77-4137-b459-9c1be1a005ef-combined-ca-bundle\") pod \"glance-db-sync-vtf8f\" (UID: \"6dee4f3a-cb77-4137-b459-9c1be1a005ef\") " pod="openstack/glance-db-sync-vtf8f" Nov 24 01:27:59 crc kubenswrapper[4755]: I1124 01:27:59.060206 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6dee4f3a-cb77-4137-b459-9c1be1a005ef-db-sync-config-data\") pod \"glance-db-sync-vtf8f\" (UID: \"6dee4f3a-cb77-4137-b459-9c1be1a005ef\") " pod="openstack/glance-db-sync-vtf8f" Nov 24 01:27:59 crc kubenswrapper[4755]: I1124 01:27:59.070148 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hjk7d\" (UniqueName: \"kubernetes.io/projected/6dee4f3a-cb77-4137-b459-9c1be1a005ef-kube-api-access-hjk7d\") pod \"glance-db-sync-vtf8f\" (UID: \"6dee4f3a-cb77-4137-b459-9c1be1a005ef\") " pod="openstack/glance-db-sync-vtf8f" Nov 24 01:27:59 crc kubenswrapper[4755]: I1124 01:27:59.121942 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-vtf8f" Nov 24 01:27:59 crc kubenswrapper[4755]: I1124 01:27:59.265885 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-lpg42" event={"ID":"a67b2b8c-3846-4f9d-a5d0-9279028f63e5","Type":"ContainerDied","Data":"2528d3399fcb2a29728cdb89bab76b017840807e546d9d91e5102df0dd3e88ef"} Nov 24 01:27:59 crc kubenswrapper[4755]: I1124 01:27:59.266255 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2528d3399fcb2a29728cdb89bab76b017840807e546d9d91e5102df0dd3e88ef" Nov 24 01:27:59 crc kubenswrapper[4755]: I1124 01:27:59.265971 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-lpg42" Nov 24 01:27:59 crc kubenswrapper[4755]: I1124 01:27:59.701403 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-vtf8f"] Nov 24 01:27:59 crc kubenswrapper[4755]: W1124 01:27:59.707828 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6dee4f3a_cb77_4137_b459_9c1be1a005ef.slice/crio-e3ff0415a1a18664bea73793ba706e1cb37d56a1ab2c46c8896bb6405dc0d911 WatchSource:0}: Error finding container e3ff0415a1a18664bea73793ba706e1cb37d56a1ab2c46c8896bb6405dc0d911: Status 404 returned error can't find the container with id e3ff0415a1a18664bea73793ba706e1cb37d56a1ab2c46c8896bb6405dc0d911 Nov 24 01:28:00 crc kubenswrapper[4755]: I1124 01:28:00.275436 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"c3d5f6f4-a502-4cbf-95c6-e85416bcd559","Type":"ContainerStarted","Data":"7698ec3fe09db88801ff567471b9dea7db8f3578555b28e6129404593860e722"} Nov 24 01:28:00 crc kubenswrapper[4755]: I1124 01:28:00.275484 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"c3d5f6f4-a502-4cbf-95c6-e85416bcd559","Type":"ContainerStarted","Data":"2aaf67e48099692a54ed391a6499aa420706e0783d4dcc1f2c2fc99a746e2f42"} Nov 24 01:28:00 crc kubenswrapper[4755]: I1124 01:28:00.275495 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"c3d5f6f4-a502-4cbf-95c6-e85416bcd559","Type":"ContainerStarted","Data":"63e2c57b4cf5b546db9c4d581ad2ffc4e6ecb5603a7e40df6b6cf428bdb9ab2b"} Nov 24 01:28:00 crc kubenswrapper[4755]: I1124 01:28:00.275505 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"c3d5f6f4-a502-4cbf-95c6-e85416bcd559","Type":"ContainerStarted","Data":"b2342535e892416a37daea71cc1ae3dee30722c1f376711f0fb80a3a6d2db689"} Nov 24 01:28:00 crc kubenswrapper[4755]: I1124 01:28:00.276752 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-vtf8f" event={"ID":"6dee4f3a-cb77-4137-b459-9c1be1a005ef","Type":"ContainerStarted","Data":"e3ff0415a1a18664bea73793ba706e1cb37d56a1ab2c46c8896bb6405dc0d911"} Nov 24 01:28:01 crc kubenswrapper[4755]: I1124 01:28:01.304522 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"c3d5f6f4-a502-4cbf-95c6-e85416bcd559","Type":"ContainerStarted","Data":"a87057f4682bb8d26177e2cbd6acb5e281b6880955d5b28b9285a15c688f6b78"} Nov 24 01:28:01 crc kubenswrapper[4755]: I1124 01:28:01.304859 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"c3d5f6f4-a502-4cbf-95c6-e85416bcd559","Type":"ContainerStarted","Data":"0352507c67b102fc58d7d256cf940c8ccb5b9e78b0332a3056726afdf47f687b"} Nov 24 01:28:02 crc kubenswrapper[4755]: I1124 01:28:02.315545 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"c3d5f6f4-a502-4cbf-95c6-e85416bcd559","Type":"ContainerStarted","Data":"3e17c4a0e895d5f8f33e846a503aad0e81850158c836f1465eb54688d9ae0bc2"} Nov 24 01:28:02 crc kubenswrapper[4755]: I1124 01:28:02.315594 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"c3d5f6f4-a502-4cbf-95c6-e85416bcd559","Type":"ContainerStarted","Data":"b7e2592ae53bc8037f5a6251f9c098b281eb08926173720840e34e74f6686899"} Nov 24 01:28:03 crc kubenswrapper[4755]: I1124 01:28:03.294918 
4755 patch_prober.go:28] interesting pod/machine-config-daemon-h8xzm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 01:28:03 crc kubenswrapper[4755]: I1124 01:28:03.295292 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 01:28:03 crc kubenswrapper[4755]: I1124 01:28:03.324586 4755 generic.go:334] "Generic (PLEG): container finished" podID="20a66507-c5f4-43d2-a99b-18daaffea30f" containerID="01167809120deee76264fb76f66c01e755d3143e759623ecc51641de11bb6563" exitCode=0 Nov 24 01:28:03 crc kubenswrapper[4755]: I1124 01:28:03.324686 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"20a66507-c5f4-43d2-a99b-18daaffea30f","Type":"ContainerDied","Data":"01167809120deee76264fb76f66c01e755d3143e759623ecc51641de11bb6563"} Nov 24 01:28:03 crc kubenswrapper[4755]: I1124 01:28:03.328289 4755 generic.go:334] "Generic (PLEG): container finished" podID="34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed" containerID="5158b16b766a448d141bf8a6f42a136b467e9d76431b7b8f0bf4a3de684c35ca" exitCode=0 Nov 24 01:28:03 crc kubenswrapper[4755]: I1124 01:28:03.328342 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed","Type":"ContainerDied","Data":"5158b16b766a448d141bf8a6f42a136b467e9d76431b7b8f0bf4a3de684c35ca"} Nov 24 01:28:03 crc kubenswrapper[4755]: I1124 01:28:03.337526 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"c3d5f6f4-a502-4cbf-95c6-e85416bcd559","Type":"ContainerStarted","Data":"2f4dd057075821836c7d5b6dbd0f3cf04ea714303473800b86e4493bca1bf766"} Nov 24 01:28:03 crc kubenswrapper[4755]: I1124 01:28:03.337730 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"c3d5f6f4-a502-4cbf-95c6-e85416bcd559","Type":"ContainerStarted","Data":"ef8fbaa24d9c4e1fe097adb806b358d85ff6a2ca2843bc579b8f1e2946ef5885"} Nov 24 01:28:03 crc kubenswrapper[4755]: I1124 01:28:03.337805 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"c3d5f6f4-a502-4cbf-95c6-e85416bcd559","Type":"ContainerStarted","Data":"9d11904932127008b947c8f3ec35084724c656e993a8e96d6a42064063e715dc"} Nov 24 01:28:03 crc kubenswrapper[4755]: I1124 01:28:03.337888 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"c3d5f6f4-a502-4cbf-95c6-e85416bcd559","Type":"ContainerStarted","Data":"0da520875f08c9fcbcdfe688da11ea4dccb0234fb18f09d2091a6b58dfe86380"} Nov 24 01:28:03 crc kubenswrapper[4755]: I1124 01:28:03.657031 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-7tzgl" podUID="06d5bee3-77f6-4f08-bfc6-6c9cf6f0bdc0" containerName="ovn-controller" probeResult="failure" output=< Nov 24 01:28:03 crc kubenswrapper[4755]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Nov 24 01:28:03 crc kubenswrapper[4755]: > Nov 24 01:28:03 crc kubenswrapper[4755]: I1124 01:28:03.666146 4755 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-khjj5" Nov 24 01:28:03 crc kubenswrapper[4755]: I1124 01:28:03.681492 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-khjj5" Nov 24 01:28:03 crc kubenswrapper[4755]: I1124 01:28:03.896341 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-7tzgl-config-hckt6"] Nov 24 01:28:03 crc kubenswrapper[4755]: I1124 01:28:03.897706 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-7tzgl-config-hckt6" Nov 24 01:28:03 crc kubenswrapper[4755]: I1124 01:28:03.908482 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-7tzgl-config-hckt6"] Nov 24 01:28:03 crc kubenswrapper[4755]: I1124 01:28:03.912173 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.032920 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ee11da05-0910-4c0b-99f6-cab004e610d3-var-run\") pod \"ovn-controller-7tzgl-config-hckt6\" (UID: \"ee11da05-0910-4c0b-99f6-cab004e610d3\") " pod="openstack/ovn-controller-7tzgl-config-hckt6" Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.033187 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ee11da05-0910-4c0b-99f6-cab004e610d3-var-run-ovn\") pod \"ovn-controller-7tzgl-config-hckt6\" (UID: \"ee11da05-0910-4c0b-99f6-cab004e610d3\") " pod="openstack/ovn-controller-7tzgl-config-hckt6" Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.033320 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ee11da05-0910-4c0b-99f6-cab004e610d3-additional-scripts\") pod \"ovn-controller-7tzgl-config-hckt6\" (UID: \"ee11da05-0910-4c0b-99f6-cab004e610d3\") " pod="openstack/ovn-controller-7tzgl-config-hckt6" Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.033396 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ee11da05-0910-4c0b-99f6-cab004e610d3-var-log-ovn\") pod \"ovn-controller-7tzgl-config-hckt6\" (UID: \"ee11da05-0910-4c0b-99f6-cab004e610d3\") " pod="openstack/ovn-controller-7tzgl-config-hckt6" Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.033486 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ee11da05-0910-4c0b-99f6-cab004e610d3-scripts\") pod \"ovn-controller-7tzgl-config-hckt6\" (UID: \"ee11da05-0910-4c0b-99f6-cab004e610d3\") " pod="openstack/ovn-controller-7tzgl-config-hckt6" Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.033580 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5tkgv\" (UniqueName: \"kubernetes.io/projected/ee11da05-0910-4c0b-99f6-cab004e610d3-kube-api-access-5tkgv\") pod \"ovn-controller-7tzgl-config-hckt6\" (UID: \"ee11da05-0910-4c0b-99f6-cab004e610d3\") " pod="openstack/ovn-controller-7tzgl-config-hckt6" Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.134868 4755 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ee11da05-0910-4c0b-99f6-cab004e610d3-var-log-ovn\") pod \"ovn-controller-7tzgl-config-hckt6\" (UID: \"ee11da05-0910-4c0b-99f6-cab004e610d3\") " pod="openstack/ovn-controller-7tzgl-config-hckt6" Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.135161 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ee11da05-0910-4c0b-99f6-cab004e610d3-scripts\") pod \"ovn-controller-7tzgl-config-hckt6\" (UID: \"ee11da05-0910-4c0b-99f6-cab004e610d3\") " pod="openstack/ovn-controller-7tzgl-config-hckt6" Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.135293 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ee11da05-0910-4c0b-99f6-cab004e610d3-var-log-ovn\") pod \"ovn-controller-7tzgl-config-hckt6\" (UID: \"ee11da05-0910-4c0b-99f6-cab004e610d3\") " pod="openstack/ovn-controller-7tzgl-config-hckt6" Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.135297 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5tkgv\" (UniqueName: \"kubernetes.io/projected/ee11da05-0910-4c0b-99f6-cab004e610d3-kube-api-access-5tkgv\") pod \"ovn-controller-7tzgl-config-hckt6\" (UID: \"ee11da05-0910-4c0b-99f6-cab004e610d3\") " pod="openstack/ovn-controller-7tzgl-config-hckt6" Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.135454 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ee11da05-0910-4c0b-99f6-cab004e610d3-var-run\") pod \"ovn-controller-7tzgl-config-hckt6\" (UID: \"ee11da05-0910-4c0b-99f6-cab004e610d3\") " pod="openstack/ovn-controller-7tzgl-config-hckt6" Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.135481 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ee11da05-0910-4c0b-99f6-cab004e610d3-var-run-ovn\") pod \"ovn-controller-7tzgl-config-hckt6\" (UID: \"ee11da05-0910-4c0b-99f6-cab004e610d3\") " pod="openstack/ovn-controller-7tzgl-config-hckt6" Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.135681 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ee11da05-0910-4c0b-99f6-cab004e610d3-additional-scripts\") pod \"ovn-controller-7tzgl-config-hckt6\" (UID: \"ee11da05-0910-4c0b-99f6-cab004e610d3\") " pod="openstack/ovn-controller-7tzgl-config-hckt6" Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.136312 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ee11da05-0910-4c0b-99f6-cab004e610d3-additional-scripts\") pod \"ovn-controller-7tzgl-config-hckt6\" (UID: \"ee11da05-0910-4c0b-99f6-cab004e610d3\") " pod="openstack/ovn-controller-7tzgl-config-hckt6" Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.136366 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ee11da05-0910-4c0b-99f6-cab004e610d3-var-run\") pod \"ovn-controller-7tzgl-config-hckt6\" (UID: \"ee11da05-0910-4c0b-99f6-cab004e610d3\") " pod="openstack/ovn-controller-7tzgl-config-hckt6" Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.136401 4755 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ee11da05-0910-4c0b-99f6-cab004e610d3-var-run-ovn\") pod \"ovn-controller-7tzgl-config-hckt6\" (UID: \"ee11da05-0910-4c0b-99f6-cab004e610d3\") " pod="openstack/ovn-controller-7tzgl-config-hckt6" Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.138181 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ee11da05-0910-4c0b-99f6-cab004e610d3-scripts\") pod \"ovn-controller-7tzgl-config-hckt6\" (UID: \"ee11da05-0910-4c0b-99f6-cab004e610d3\") " pod="openstack/ovn-controller-7tzgl-config-hckt6" Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.155959 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5tkgv\" (UniqueName: \"kubernetes.io/projected/ee11da05-0910-4c0b-99f6-cab004e610d3-kube-api-access-5tkgv\") pod \"ovn-controller-7tzgl-config-hckt6\" (UID: \"ee11da05-0910-4c0b-99f6-cab004e610d3\") " pod="openstack/ovn-controller-7tzgl-config-hckt6" Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.214369 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-7tzgl-config-hckt6" Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.348854 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"20a66507-c5f4-43d2-a99b-18daaffea30f","Type":"ContainerStarted","Data":"27d007891e967c7fca63becaea7ed8b839367ea5c8b7443b2a527e82d2fbd53b"} Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.349372 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.350967 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed","Type":"ContainerStarted","Data":"a5f4b9f72d7febdbc07a67f9555604b9a20f551ec4c658e6634ffb48e5392147"} Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.351194 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.361036 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"c3d5f6f4-a502-4cbf-95c6-e85416bcd559","Type":"ContainerStarted","Data":"f2ce0eb5a6fcdfda904c352dc8b158f1c115bbfebfd5bc975fe086fdb96e66f5"} Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.361097 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"c3d5f6f4-a502-4cbf-95c6-e85416bcd559","Type":"ContainerStarted","Data":"d9d2830b9b524acbb6631f66352516cadaa910ebdfc38d19267e8be4e9cc2088"} Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.361107 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"c3d5f6f4-a502-4cbf-95c6-e85416bcd559","Type":"ContainerStarted","Data":"69f598dd2bea335800bc78986ef9c3165632f2128634da5c1cc5524a4914076f"} Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.394899 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=50.817996024 podStartE2EDuration="1m1.394874861s" podCreationTimestamp="2025-11-24 01:27:03 +0000 UTC" firstStartedPulling="2025-11-24 01:27:18.756334722 +0000 UTC m=+863.442400223" lastFinishedPulling="2025-11-24 01:27:29.333213569 +0000 
UTC m=+874.019279060" observedRunningTime="2025-11-24 01:28:04.374747804 +0000 UTC m=+909.060813315" watchObservedRunningTime="2025-11-24 01:28:04.394874861 +0000 UTC m=+909.080940362" Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.398390 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=50.485201579 podStartE2EDuration="1m1.398366369s" podCreationTimestamp="2025-11-24 01:27:03 +0000 UTC" firstStartedPulling="2025-11-24 01:27:18.751128266 +0000 UTC m=+863.437193757" lastFinishedPulling="2025-11-24 01:27:29.664293046 +0000 UTC m=+874.350358547" observedRunningTime="2025-11-24 01:28:04.395452007 +0000 UTC m=+909.081517508" watchObservedRunningTime="2025-11-24 01:28:04.398366369 +0000 UTC m=+909.084431890" Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.447347 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=19.931022055 podStartE2EDuration="24.447326936s" podCreationTimestamp="2025-11-24 01:27:40 +0000 UTC" firstStartedPulling="2025-11-24 01:27:57.968741049 +0000 UTC m=+902.654806560" lastFinishedPulling="2025-11-24 01:28:02.48504595 +0000 UTC m=+907.171111441" observedRunningTime="2025-11-24 01:28:04.433921459 +0000 UTC m=+909.119986960" watchObservedRunningTime="2025-11-24 01:28:04.447326936 +0000 UTC m=+909.133392427" Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.668719 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-7tzgl-config-hckt6"] Nov 24 01:28:04 crc kubenswrapper[4755]: W1124 01:28:04.678034 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podee11da05_0910_4c0b_99f6_cab004e610d3.slice/crio-b7a1f686b8abd70ddb6d2dfddbe01f2655ac8ed78caae83e196467d0aa5e791a WatchSource:0}: Error finding container b7a1f686b8abd70ddb6d2dfddbe01f2655ac8ed78caae83e196467d0aa5e791a: Status 404 returned error can't find the container with id b7a1f686b8abd70ddb6d2dfddbe01f2655ac8ed78caae83e196467d0aa5e791a Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.783549 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-x9ksn"] Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.785462 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-x9ksn" Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.788356 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.803150 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-x9ksn"] Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.847264 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46a31b0c-c17e-469f-823e-f56504308b2c-config\") pod \"dnsmasq-dns-77585f5f8c-x9ksn\" (UID: \"46a31b0c-c17e-469f-823e-f56504308b2c\") " pod="openstack/dnsmasq-dns-77585f5f8c-x9ksn" Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.847408 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/46a31b0c-c17e-469f-823e-f56504308b2c-ovsdbserver-nb\") pod \"dnsmasq-dns-77585f5f8c-x9ksn\" (UID: \"46a31b0c-c17e-469f-823e-f56504308b2c\") " pod="openstack/dnsmasq-dns-77585f5f8c-x9ksn" Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.847433 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/46a31b0c-c17e-469f-823e-f56504308b2c-ovsdbserver-sb\") pod \"dnsmasq-dns-77585f5f8c-x9ksn\" (UID: \"46a31b0c-c17e-469f-823e-f56504308b2c\") " pod="openstack/dnsmasq-dns-77585f5f8c-x9ksn" Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.847466 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/46a31b0c-c17e-469f-823e-f56504308b2c-dns-svc\") pod \"dnsmasq-dns-77585f5f8c-x9ksn\" (UID: \"46a31b0c-c17e-469f-823e-f56504308b2c\") " pod="openstack/dnsmasq-dns-77585f5f8c-x9ksn" Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.847665 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tnthb\" (UniqueName: \"kubernetes.io/projected/46a31b0c-c17e-469f-823e-f56504308b2c-kube-api-access-tnthb\") pod \"dnsmasq-dns-77585f5f8c-x9ksn\" (UID: \"46a31b0c-c17e-469f-823e-f56504308b2c\") " pod="openstack/dnsmasq-dns-77585f5f8c-x9ksn" Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.847711 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/46a31b0c-c17e-469f-823e-f56504308b2c-dns-swift-storage-0\") pod \"dnsmasq-dns-77585f5f8c-x9ksn\" (UID: \"46a31b0c-c17e-469f-823e-f56504308b2c\") " pod="openstack/dnsmasq-dns-77585f5f8c-x9ksn" Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.950298 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46a31b0c-c17e-469f-823e-f56504308b2c-config\") pod \"dnsmasq-dns-77585f5f8c-x9ksn\" (UID: \"46a31b0c-c17e-469f-823e-f56504308b2c\") " pod="openstack/dnsmasq-dns-77585f5f8c-x9ksn" Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.950384 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/46a31b0c-c17e-469f-823e-f56504308b2c-ovsdbserver-nb\") pod \"dnsmasq-dns-77585f5f8c-x9ksn\" (UID: 
\"46a31b0c-c17e-469f-823e-f56504308b2c\") " pod="openstack/dnsmasq-dns-77585f5f8c-x9ksn" Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.950404 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/46a31b0c-c17e-469f-823e-f56504308b2c-ovsdbserver-sb\") pod \"dnsmasq-dns-77585f5f8c-x9ksn\" (UID: \"46a31b0c-c17e-469f-823e-f56504308b2c\") " pod="openstack/dnsmasq-dns-77585f5f8c-x9ksn" Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.950428 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/46a31b0c-c17e-469f-823e-f56504308b2c-dns-svc\") pod \"dnsmasq-dns-77585f5f8c-x9ksn\" (UID: \"46a31b0c-c17e-469f-823e-f56504308b2c\") " pod="openstack/dnsmasq-dns-77585f5f8c-x9ksn" Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.950521 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tnthb\" (UniqueName: \"kubernetes.io/projected/46a31b0c-c17e-469f-823e-f56504308b2c-kube-api-access-tnthb\") pod \"dnsmasq-dns-77585f5f8c-x9ksn\" (UID: \"46a31b0c-c17e-469f-823e-f56504308b2c\") " pod="openstack/dnsmasq-dns-77585f5f8c-x9ksn" Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.950557 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/46a31b0c-c17e-469f-823e-f56504308b2c-dns-swift-storage-0\") pod \"dnsmasq-dns-77585f5f8c-x9ksn\" (UID: \"46a31b0c-c17e-469f-823e-f56504308b2c\") " pod="openstack/dnsmasq-dns-77585f5f8c-x9ksn" Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.951718 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/46a31b0c-c17e-469f-823e-f56504308b2c-ovsdbserver-nb\") pod \"dnsmasq-dns-77585f5f8c-x9ksn\" (UID: \"46a31b0c-c17e-469f-823e-f56504308b2c\") " pod="openstack/dnsmasq-dns-77585f5f8c-x9ksn" Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.951850 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/46a31b0c-c17e-469f-823e-f56504308b2c-dns-svc\") pod \"dnsmasq-dns-77585f5f8c-x9ksn\" (UID: \"46a31b0c-c17e-469f-823e-f56504308b2c\") " pod="openstack/dnsmasq-dns-77585f5f8c-x9ksn" Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.952227 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46a31b0c-c17e-469f-823e-f56504308b2c-config\") pod \"dnsmasq-dns-77585f5f8c-x9ksn\" (UID: \"46a31b0c-c17e-469f-823e-f56504308b2c\") " pod="openstack/dnsmasq-dns-77585f5f8c-x9ksn" Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.952468 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/46a31b0c-c17e-469f-823e-f56504308b2c-ovsdbserver-sb\") pod \"dnsmasq-dns-77585f5f8c-x9ksn\" (UID: \"46a31b0c-c17e-469f-823e-f56504308b2c\") " pod="openstack/dnsmasq-dns-77585f5f8c-x9ksn" Nov 24 01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.953686 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/46a31b0c-c17e-469f-823e-f56504308b2c-dns-swift-storage-0\") pod \"dnsmasq-dns-77585f5f8c-x9ksn\" (UID: \"46a31b0c-c17e-469f-823e-f56504308b2c\") " pod="openstack/dnsmasq-dns-77585f5f8c-x9ksn" Nov 24 
01:28:04 crc kubenswrapper[4755]: I1124 01:28:04.975583 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tnthb\" (UniqueName: \"kubernetes.io/projected/46a31b0c-c17e-469f-823e-f56504308b2c-kube-api-access-tnthb\") pod \"dnsmasq-dns-77585f5f8c-x9ksn\" (UID: \"46a31b0c-c17e-469f-823e-f56504308b2c\") " pod="openstack/dnsmasq-dns-77585f5f8c-x9ksn" Nov 24 01:28:05 crc kubenswrapper[4755]: I1124 01:28:05.113332 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-x9ksn" Nov 24 01:28:05 crc kubenswrapper[4755]: I1124 01:28:05.367894 4755 generic.go:334] "Generic (PLEG): container finished" podID="ee11da05-0910-4c0b-99f6-cab004e610d3" containerID="0fd999277818271c6f1cd4138ef94919ae3efab382f7c50cf77c5f04729957de" exitCode=0 Nov 24 01:28:05 crc kubenswrapper[4755]: I1124 01:28:05.368043 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-7tzgl-config-hckt6" event={"ID":"ee11da05-0910-4c0b-99f6-cab004e610d3","Type":"ContainerDied","Data":"0fd999277818271c6f1cd4138ef94919ae3efab382f7c50cf77c5f04729957de"} Nov 24 01:28:05 crc kubenswrapper[4755]: I1124 01:28:05.368094 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-7tzgl-config-hckt6" event={"ID":"ee11da05-0910-4c0b-99f6-cab004e610d3","Type":"ContainerStarted","Data":"b7a1f686b8abd70ddb6d2dfddbe01f2655ac8ed78caae83e196467d0aa5e791a"} Nov 24 01:28:08 crc kubenswrapper[4755]: I1124 01:28:08.664448 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-7tzgl" Nov 24 01:28:11 crc kubenswrapper[4755]: I1124 01:28:11.294731 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-7tzgl-config-hckt6" Nov 24 01:28:11 crc kubenswrapper[4755]: I1124 01:28:11.409206 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5tkgv\" (UniqueName: \"kubernetes.io/projected/ee11da05-0910-4c0b-99f6-cab004e610d3-kube-api-access-5tkgv\") pod \"ee11da05-0910-4c0b-99f6-cab004e610d3\" (UID: \"ee11da05-0910-4c0b-99f6-cab004e610d3\") " Nov 24 01:28:11 crc kubenswrapper[4755]: I1124 01:28:11.409531 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ee11da05-0910-4c0b-99f6-cab004e610d3-scripts\") pod \"ee11da05-0910-4c0b-99f6-cab004e610d3\" (UID: \"ee11da05-0910-4c0b-99f6-cab004e610d3\") " Nov 24 01:28:11 crc kubenswrapper[4755]: I1124 01:28:11.409651 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ee11da05-0910-4c0b-99f6-cab004e610d3-var-log-ovn\") pod \"ee11da05-0910-4c0b-99f6-cab004e610d3\" (UID: \"ee11da05-0910-4c0b-99f6-cab004e610d3\") " Nov 24 01:28:11 crc kubenswrapper[4755]: I1124 01:28:11.409855 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ee11da05-0910-4c0b-99f6-cab004e610d3-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "ee11da05-0910-4c0b-99f6-cab004e610d3" (UID: "ee11da05-0910-4c0b-99f6-cab004e610d3"). InnerVolumeSpecName "var-log-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 01:28:11 crc kubenswrapper[4755]: I1124 01:28:11.409889 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ee11da05-0910-4c0b-99f6-cab004e610d3-additional-scripts\") pod \"ee11da05-0910-4c0b-99f6-cab004e610d3\" (UID: \"ee11da05-0910-4c0b-99f6-cab004e610d3\") " Nov 24 01:28:11 crc kubenswrapper[4755]: I1124 01:28:11.409940 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ee11da05-0910-4c0b-99f6-cab004e610d3-var-run\") pod \"ee11da05-0910-4c0b-99f6-cab004e610d3\" (UID: \"ee11da05-0910-4c0b-99f6-cab004e610d3\") " Nov 24 01:28:11 crc kubenswrapper[4755]: I1124 01:28:11.410015 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ee11da05-0910-4c0b-99f6-cab004e610d3-var-run-ovn\") pod \"ee11da05-0910-4c0b-99f6-cab004e610d3\" (UID: \"ee11da05-0910-4c0b-99f6-cab004e610d3\") " Nov 24 01:28:11 crc kubenswrapper[4755]: I1124 01:28:11.410300 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ee11da05-0910-4c0b-99f6-cab004e610d3-var-run" (OuterVolumeSpecName: "var-run") pod "ee11da05-0910-4c0b-99f6-cab004e610d3" (UID: "ee11da05-0910-4c0b-99f6-cab004e610d3"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 01:28:11 crc kubenswrapper[4755]: I1124 01:28:11.410353 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ee11da05-0910-4c0b-99f6-cab004e610d3-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "ee11da05-0910-4c0b-99f6-cab004e610d3" (UID: "ee11da05-0910-4c0b-99f6-cab004e610d3"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 01:28:11 crc kubenswrapper[4755]: I1124 01:28:11.410398 4755 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ee11da05-0910-4c0b-99f6-cab004e610d3-var-log-ovn\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:11 crc kubenswrapper[4755]: I1124 01:28:11.410414 4755 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ee11da05-0910-4c0b-99f6-cab004e610d3-var-run\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:11 crc kubenswrapper[4755]: I1124 01:28:11.410778 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ee11da05-0910-4c0b-99f6-cab004e610d3-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "ee11da05-0910-4c0b-99f6-cab004e610d3" (UID: "ee11da05-0910-4c0b-99f6-cab004e610d3"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:28:11 crc kubenswrapper[4755]: I1124 01:28:11.412538 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ee11da05-0910-4c0b-99f6-cab004e610d3-scripts" (OuterVolumeSpecName: "scripts") pod "ee11da05-0910-4c0b-99f6-cab004e610d3" (UID: "ee11da05-0910-4c0b-99f6-cab004e610d3"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:28:11 crc kubenswrapper[4755]: I1124 01:28:11.414195 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee11da05-0910-4c0b-99f6-cab004e610d3-kube-api-access-5tkgv" (OuterVolumeSpecName: "kube-api-access-5tkgv") pod "ee11da05-0910-4c0b-99f6-cab004e610d3" (UID: "ee11da05-0910-4c0b-99f6-cab004e610d3"). InnerVolumeSpecName "kube-api-access-5tkgv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:28:11 crc kubenswrapper[4755]: I1124 01:28:11.416465 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-7tzgl-config-hckt6" event={"ID":"ee11da05-0910-4c0b-99f6-cab004e610d3","Type":"ContainerDied","Data":"b7a1f686b8abd70ddb6d2dfddbe01f2655ac8ed78caae83e196467d0aa5e791a"} Nov 24 01:28:11 crc kubenswrapper[4755]: I1124 01:28:11.416498 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b7a1f686b8abd70ddb6d2dfddbe01f2655ac8ed78caae83e196467d0aa5e791a" Nov 24 01:28:11 crc kubenswrapper[4755]: I1124 01:28:11.416538 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-7tzgl-config-hckt6" Nov 24 01:28:11 crc kubenswrapper[4755]: I1124 01:28:11.512183 4755 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ee11da05-0910-4c0b-99f6-cab004e610d3-var-run-ovn\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:11 crc kubenswrapper[4755]: I1124 01:28:11.512219 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5tkgv\" (UniqueName: \"kubernetes.io/projected/ee11da05-0910-4c0b-99f6-cab004e610d3-kube-api-access-5tkgv\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:11 crc kubenswrapper[4755]: I1124 01:28:11.512233 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ee11da05-0910-4c0b-99f6-cab004e610d3-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:11 crc kubenswrapper[4755]: I1124 01:28:11.512245 4755 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ee11da05-0910-4c0b-99f6-cab004e610d3-additional-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:11 crc kubenswrapper[4755]: I1124 01:28:11.679312 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-x9ksn"] Nov 24 01:28:11 crc kubenswrapper[4755]: W1124 01:28:11.689297 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod46a31b0c_c17e_469f_823e_f56504308b2c.slice/crio-736bd529d6daea5fd8d750db531e41f333947b1f93f84a59970a22a96b87c610 WatchSource:0}: Error finding container 736bd529d6daea5fd8d750db531e41f333947b1f93f84a59970a22a96b87c610: Status 404 returned error can't find the container with id 736bd529d6daea5fd8d750db531e41f333947b1f93f84a59970a22a96b87c610 Nov 24 01:28:12 crc kubenswrapper[4755]: I1124 01:28:12.411517 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-7tzgl-config-hckt6"] Nov 24 01:28:12 crc kubenswrapper[4755]: I1124 01:28:12.416946 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-7tzgl-config-hckt6"] Nov 24 01:28:12 crc kubenswrapper[4755]: I1124 01:28:12.426527 4755 generic.go:334] "Generic (PLEG): container finished" podID="46a31b0c-c17e-469f-823e-f56504308b2c" 
containerID="9383f126143afb21db44e9f74b6428ff36c029f2dce010a7f7a7cc6953b6afdd" exitCode=0 Nov 24 01:28:12 crc kubenswrapper[4755]: I1124 01:28:12.426664 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-x9ksn" event={"ID":"46a31b0c-c17e-469f-823e-f56504308b2c","Type":"ContainerDied","Data":"9383f126143afb21db44e9f74b6428ff36c029f2dce010a7f7a7cc6953b6afdd"} Nov 24 01:28:12 crc kubenswrapper[4755]: I1124 01:28:12.426704 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-x9ksn" event={"ID":"46a31b0c-c17e-469f-823e-f56504308b2c","Type":"ContainerStarted","Data":"736bd529d6daea5fd8d750db531e41f333947b1f93f84a59970a22a96b87c610"} Nov 24 01:28:12 crc kubenswrapper[4755]: I1124 01:28:12.429632 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-vtf8f" event={"ID":"6dee4f3a-cb77-4137-b459-9c1be1a005ef","Type":"ContainerStarted","Data":"f59b4c5cd502b8565f2159fdb02163c0d0fd6805f2b214da42cfaa7873855440"} Nov 24 01:28:12 crc kubenswrapper[4755]: I1124 01:28:12.479844 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-vtf8f" podStartSLOduration=2.8687238 podStartE2EDuration="14.479828629s" podCreationTimestamp="2025-11-24 01:27:58 +0000 UTC" firstStartedPulling="2025-11-24 01:27:59.712095564 +0000 UTC m=+904.398161065" lastFinishedPulling="2025-11-24 01:28:11.323200393 +0000 UTC m=+916.009265894" observedRunningTime="2025-11-24 01:28:12.478127351 +0000 UTC m=+917.164192862" watchObservedRunningTime="2025-11-24 01:28:12.479828629 +0000 UTC m=+917.165894130" Nov 24 01:28:12 crc kubenswrapper[4755]: I1124 01:28:12.510991 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-7tzgl-config-7cbwn"] Nov 24 01:28:12 crc kubenswrapper[4755]: E1124 01:28:12.512103 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee11da05-0910-4c0b-99f6-cab004e610d3" containerName="ovn-config" Nov 24 01:28:12 crc kubenswrapper[4755]: I1124 01:28:12.512299 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee11da05-0910-4c0b-99f6-cab004e610d3" containerName="ovn-config" Nov 24 01:28:12 crc kubenswrapper[4755]: I1124 01:28:12.512687 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee11da05-0910-4c0b-99f6-cab004e610d3" containerName="ovn-config" Nov 24 01:28:12 crc kubenswrapper[4755]: I1124 01:28:12.513279 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-7tzgl-config-7cbwn" Nov 24 01:28:12 crc kubenswrapper[4755]: I1124 01:28:12.515083 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Nov 24 01:28:12 crc kubenswrapper[4755]: I1124 01:28:12.532396 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-7tzgl-config-7cbwn"] Nov 24 01:28:12 crc kubenswrapper[4755]: I1124 01:28:12.630292 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ee2730ba-9a5e-48eb-be37-0cc250092218-var-run\") pod \"ovn-controller-7tzgl-config-7cbwn\" (UID: \"ee2730ba-9a5e-48eb-be37-0cc250092218\") " pod="openstack/ovn-controller-7tzgl-config-7cbwn" Nov 24 01:28:12 crc kubenswrapper[4755]: I1124 01:28:12.630378 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ee2730ba-9a5e-48eb-be37-0cc250092218-var-run-ovn\") pod \"ovn-controller-7tzgl-config-7cbwn\" (UID: \"ee2730ba-9a5e-48eb-be37-0cc250092218\") " pod="openstack/ovn-controller-7tzgl-config-7cbwn" Nov 24 01:28:12 crc kubenswrapper[4755]: I1124 01:28:12.630428 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ee2730ba-9a5e-48eb-be37-0cc250092218-scripts\") pod \"ovn-controller-7tzgl-config-7cbwn\" (UID: \"ee2730ba-9a5e-48eb-be37-0cc250092218\") " pod="openstack/ovn-controller-7tzgl-config-7cbwn" Nov 24 01:28:12 crc kubenswrapper[4755]: I1124 01:28:12.630659 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ee2730ba-9a5e-48eb-be37-0cc250092218-var-log-ovn\") pod \"ovn-controller-7tzgl-config-7cbwn\" (UID: \"ee2730ba-9a5e-48eb-be37-0cc250092218\") " pod="openstack/ovn-controller-7tzgl-config-7cbwn" Nov 24 01:28:12 crc kubenswrapper[4755]: I1124 01:28:12.630768 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cjdmr\" (UniqueName: \"kubernetes.io/projected/ee2730ba-9a5e-48eb-be37-0cc250092218-kube-api-access-cjdmr\") pod \"ovn-controller-7tzgl-config-7cbwn\" (UID: \"ee2730ba-9a5e-48eb-be37-0cc250092218\") " pod="openstack/ovn-controller-7tzgl-config-7cbwn" Nov 24 01:28:12 crc kubenswrapper[4755]: I1124 01:28:12.630828 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ee2730ba-9a5e-48eb-be37-0cc250092218-additional-scripts\") pod \"ovn-controller-7tzgl-config-7cbwn\" (UID: \"ee2730ba-9a5e-48eb-be37-0cc250092218\") " pod="openstack/ovn-controller-7tzgl-config-7cbwn" Nov 24 01:28:12 crc kubenswrapper[4755]: I1124 01:28:12.732062 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ee2730ba-9a5e-48eb-be37-0cc250092218-var-log-ovn\") pod \"ovn-controller-7tzgl-config-7cbwn\" (UID: \"ee2730ba-9a5e-48eb-be37-0cc250092218\") " pod="openstack/ovn-controller-7tzgl-config-7cbwn" Nov 24 01:28:12 crc kubenswrapper[4755]: I1124 01:28:12.732132 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cjdmr\" (UniqueName: 
\"kubernetes.io/projected/ee2730ba-9a5e-48eb-be37-0cc250092218-kube-api-access-cjdmr\") pod \"ovn-controller-7tzgl-config-7cbwn\" (UID: \"ee2730ba-9a5e-48eb-be37-0cc250092218\") " pod="openstack/ovn-controller-7tzgl-config-7cbwn" Nov 24 01:28:12 crc kubenswrapper[4755]: I1124 01:28:12.732156 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ee2730ba-9a5e-48eb-be37-0cc250092218-additional-scripts\") pod \"ovn-controller-7tzgl-config-7cbwn\" (UID: \"ee2730ba-9a5e-48eb-be37-0cc250092218\") " pod="openstack/ovn-controller-7tzgl-config-7cbwn" Nov 24 01:28:12 crc kubenswrapper[4755]: I1124 01:28:12.732207 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ee2730ba-9a5e-48eb-be37-0cc250092218-var-run\") pod \"ovn-controller-7tzgl-config-7cbwn\" (UID: \"ee2730ba-9a5e-48eb-be37-0cc250092218\") " pod="openstack/ovn-controller-7tzgl-config-7cbwn" Nov 24 01:28:12 crc kubenswrapper[4755]: I1124 01:28:12.732223 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ee2730ba-9a5e-48eb-be37-0cc250092218-var-run-ovn\") pod \"ovn-controller-7tzgl-config-7cbwn\" (UID: \"ee2730ba-9a5e-48eb-be37-0cc250092218\") " pod="openstack/ovn-controller-7tzgl-config-7cbwn" Nov 24 01:28:12 crc kubenswrapper[4755]: I1124 01:28:12.732248 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ee2730ba-9a5e-48eb-be37-0cc250092218-scripts\") pod \"ovn-controller-7tzgl-config-7cbwn\" (UID: \"ee2730ba-9a5e-48eb-be37-0cc250092218\") " pod="openstack/ovn-controller-7tzgl-config-7cbwn" Nov 24 01:28:12 crc kubenswrapper[4755]: I1124 01:28:12.732396 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ee2730ba-9a5e-48eb-be37-0cc250092218-var-log-ovn\") pod \"ovn-controller-7tzgl-config-7cbwn\" (UID: \"ee2730ba-9a5e-48eb-be37-0cc250092218\") " pod="openstack/ovn-controller-7tzgl-config-7cbwn" Nov 24 01:28:12 crc kubenswrapper[4755]: I1124 01:28:12.732959 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ee2730ba-9a5e-48eb-be37-0cc250092218-var-run\") pod \"ovn-controller-7tzgl-config-7cbwn\" (UID: \"ee2730ba-9a5e-48eb-be37-0cc250092218\") " pod="openstack/ovn-controller-7tzgl-config-7cbwn" Nov 24 01:28:12 crc kubenswrapper[4755]: I1124 01:28:12.733016 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ee2730ba-9a5e-48eb-be37-0cc250092218-var-run-ovn\") pod \"ovn-controller-7tzgl-config-7cbwn\" (UID: \"ee2730ba-9a5e-48eb-be37-0cc250092218\") " pod="openstack/ovn-controller-7tzgl-config-7cbwn" Nov 24 01:28:12 crc kubenswrapper[4755]: I1124 01:28:12.733086 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ee2730ba-9a5e-48eb-be37-0cc250092218-additional-scripts\") pod \"ovn-controller-7tzgl-config-7cbwn\" (UID: \"ee2730ba-9a5e-48eb-be37-0cc250092218\") " pod="openstack/ovn-controller-7tzgl-config-7cbwn" Nov 24 01:28:12 crc kubenswrapper[4755]: I1124 01:28:12.734089 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/ee2730ba-9a5e-48eb-be37-0cc250092218-scripts\") pod \"ovn-controller-7tzgl-config-7cbwn\" (UID: \"ee2730ba-9a5e-48eb-be37-0cc250092218\") " pod="openstack/ovn-controller-7tzgl-config-7cbwn" Nov 24 01:28:12 crc kubenswrapper[4755]: I1124 01:28:12.759749 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cjdmr\" (UniqueName: \"kubernetes.io/projected/ee2730ba-9a5e-48eb-be37-0cc250092218-kube-api-access-cjdmr\") pod \"ovn-controller-7tzgl-config-7cbwn\" (UID: \"ee2730ba-9a5e-48eb-be37-0cc250092218\") " pod="openstack/ovn-controller-7tzgl-config-7cbwn" Nov 24 01:28:12 crc kubenswrapper[4755]: I1124 01:28:12.885033 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-7tzgl-config-7cbwn" Nov 24 01:28:13 crc kubenswrapper[4755]: I1124 01:28:13.382776 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-7tzgl-config-7cbwn"] Nov 24 01:28:13 crc kubenswrapper[4755]: I1124 01:28:13.437508 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-7tzgl-config-7cbwn" event={"ID":"ee2730ba-9a5e-48eb-be37-0cc250092218","Type":"ContainerStarted","Data":"dd8aa7e63910cadfc98a9cddc65d7096fb47da7b26b9237a4f4a7bd4cc396ad6"} Nov 24 01:28:13 crc kubenswrapper[4755]: I1124 01:28:13.439751 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-x9ksn" event={"ID":"46a31b0c-c17e-469f-823e-f56504308b2c","Type":"ContainerStarted","Data":"29634fe605bd85d02d49022c7d12a34c46e0e4db749363cdbab033f911221084"} Nov 24 01:28:13 crc kubenswrapper[4755]: I1124 01:28:13.439810 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-77585f5f8c-x9ksn" Nov 24 01:28:13 crc kubenswrapper[4755]: I1124 01:28:13.462555 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-77585f5f8c-x9ksn" podStartSLOduration=9.46253748 podStartE2EDuration="9.46253748s" podCreationTimestamp="2025-11-24 01:28:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:28:13.459732641 +0000 UTC m=+918.145798142" watchObservedRunningTime="2025-11-24 01:28:13.46253748 +0000 UTC m=+918.148602981" Nov 24 01:28:14 crc kubenswrapper[4755]: I1124 01:28:14.006073 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee11da05-0910-4c0b-99f6-cab004e610d3" path="/var/lib/kubelet/pods/ee11da05-0910-4c0b-99f6-cab004e610d3/volumes" Nov 24 01:28:14 crc kubenswrapper[4755]: I1124 01:28:14.448023 4755 generic.go:334] "Generic (PLEG): container finished" podID="ee2730ba-9a5e-48eb-be37-0cc250092218" containerID="53b4f57f1b325411337fe5690099efb35761bc35ef40ea8dfb11e204f708a43a" exitCode=0 Nov 24 01:28:14 crc kubenswrapper[4755]: I1124 01:28:14.448065 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-7tzgl-config-7cbwn" event={"ID":"ee2730ba-9a5e-48eb-be37-0cc250092218","Type":"ContainerDied","Data":"53b4f57f1b325411337fe5690099efb35761bc35ef40ea8dfb11e204f708a43a"} Nov 24 01:28:14 crc kubenswrapper[4755]: I1124 01:28:14.879847 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.138849 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:28:15 crc 
kubenswrapper[4755]: I1124 01:28:15.156103 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-k42zb"] Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.157681 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-k42zb" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.170154 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-k42zb"] Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.262444 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-w9lth"] Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.263652 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-w9lth" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.270224 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-5283-account-create-fhrq4"] Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.271268 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-5283-account-create-fhrq4" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.273936 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.276099 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4xrj5\" (UniqueName: \"kubernetes.io/projected/d8b666dc-55f4-4e9f-9768-147815005e1e-kube-api-access-4xrj5\") pod \"cinder-db-create-k42zb\" (UID: \"d8b666dc-55f4-4e9f-9768-147815005e1e\") " pod="openstack/cinder-db-create-k42zb" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.276166 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d8b666dc-55f4-4e9f-9768-147815005e1e-operator-scripts\") pod \"cinder-db-create-k42zb\" (UID: \"d8b666dc-55f4-4e9f-9768-147815005e1e\") " pod="openstack/cinder-db-create-k42zb" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.304725 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-w9lth"] Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.311929 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-5283-account-create-fhrq4"] Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.362336 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-a00b-account-create-dmk77"] Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.363368 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-a00b-account-create-dmk77" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.365840 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.375637 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-a00b-account-create-dmk77"] Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.409963 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j7t5m\" (UniqueName: \"kubernetes.io/projected/866eeb81-622c-4ea2-a727-184e5e7d745c-kube-api-access-j7t5m\") pod \"cinder-5283-account-create-fhrq4\" (UID: \"866eeb81-622c-4ea2-a727-184e5e7d745c\") " pod="openstack/cinder-5283-account-create-fhrq4" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.410078 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c85f6518-6046-441c-afae-4bd797cb807f-operator-scripts\") pod \"barbican-db-create-w9lth\" (UID: \"c85f6518-6046-441c-afae-4bd797cb807f\") " pod="openstack/barbican-db-create-w9lth" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.410122 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9rm8b\" (UniqueName: \"kubernetes.io/projected/c85f6518-6046-441c-afae-4bd797cb807f-kube-api-access-9rm8b\") pod \"barbican-db-create-w9lth\" (UID: \"c85f6518-6046-441c-afae-4bd797cb807f\") " pod="openstack/barbican-db-create-w9lth" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.410183 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/866eeb81-622c-4ea2-a727-184e5e7d745c-operator-scripts\") pod \"cinder-5283-account-create-fhrq4\" (UID: \"866eeb81-622c-4ea2-a727-184e5e7d745c\") " pod="openstack/cinder-5283-account-create-fhrq4" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.410242 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4xrj5\" (UniqueName: \"kubernetes.io/projected/d8b666dc-55f4-4e9f-9768-147815005e1e-kube-api-access-4xrj5\") pod \"cinder-db-create-k42zb\" (UID: \"d8b666dc-55f4-4e9f-9768-147815005e1e\") " pod="openstack/cinder-db-create-k42zb" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.410291 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d8b666dc-55f4-4e9f-9768-147815005e1e-operator-scripts\") pod \"cinder-db-create-k42zb\" (UID: \"d8b666dc-55f4-4e9f-9768-147815005e1e\") " pod="openstack/cinder-db-create-k42zb" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.411090 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d8b666dc-55f4-4e9f-9768-147815005e1e-operator-scripts\") pod \"cinder-db-create-k42zb\" (UID: \"d8b666dc-55f4-4e9f-9768-147815005e1e\") " pod="openstack/cinder-db-create-k42zb" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.440989 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4xrj5\" (UniqueName: \"kubernetes.io/projected/d8b666dc-55f4-4e9f-9768-147815005e1e-kube-api-access-4xrj5\") pod \"cinder-db-create-k42zb\" (UID: 
\"d8b666dc-55f4-4e9f-9768-147815005e1e\") " pod="openstack/cinder-db-create-k42zb" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.479488 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-k42zb" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.511595 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c85f6518-6046-441c-afae-4bd797cb807f-operator-scripts\") pod \"barbican-db-create-w9lth\" (UID: \"c85f6518-6046-441c-afae-4bd797cb807f\") " pod="openstack/barbican-db-create-w9lth" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.511680 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c3ad27b1-9595-4f03-b472-2dfdf7e70cc8-operator-scripts\") pod \"barbican-a00b-account-create-dmk77\" (UID: \"c3ad27b1-9595-4f03-b472-2dfdf7e70cc8\") " pod="openstack/barbican-a00b-account-create-dmk77" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.511752 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9rm8b\" (UniqueName: \"kubernetes.io/projected/c85f6518-6046-441c-afae-4bd797cb807f-kube-api-access-9rm8b\") pod \"barbican-db-create-w9lth\" (UID: \"c85f6518-6046-441c-afae-4bd797cb807f\") " pod="openstack/barbican-db-create-w9lth" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.511798 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/866eeb81-622c-4ea2-a727-184e5e7d745c-operator-scripts\") pod \"cinder-5283-account-create-fhrq4\" (UID: \"866eeb81-622c-4ea2-a727-184e5e7d745c\") " pod="openstack/cinder-5283-account-create-fhrq4" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.511864 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-flfph\" (UniqueName: \"kubernetes.io/projected/c3ad27b1-9595-4f03-b472-2dfdf7e70cc8-kube-api-access-flfph\") pod \"barbican-a00b-account-create-dmk77\" (UID: \"c3ad27b1-9595-4f03-b472-2dfdf7e70cc8\") " pod="openstack/barbican-a00b-account-create-dmk77" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.512426 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c85f6518-6046-441c-afae-4bd797cb807f-operator-scripts\") pod \"barbican-db-create-w9lth\" (UID: \"c85f6518-6046-441c-afae-4bd797cb807f\") " pod="openstack/barbican-db-create-w9lth" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.513418 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/866eeb81-622c-4ea2-a727-184e5e7d745c-operator-scripts\") pod \"cinder-5283-account-create-fhrq4\" (UID: \"866eeb81-622c-4ea2-a727-184e5e7d745c\") " pod="openstack/cinder-5283-account-create-fhrq4" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.513584 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j7t5m\" (UniqueName: \"kubernetes.io/projected/866eeb81-622c-4ea2-a727-184e5e7d745c-kube-api-access-j7t5m\") pod \"cinder-5283-account-create-fhrq4\" (UID: \"866eeb81-622c-4ea2-a727-184e5e7d745c\") " pod="openstack/cinder-5283-account-create-fhrq4" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.520837 4755 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-fhhlw"] Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.521816 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-fhhlw" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.524186 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.524376 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.524701 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.524877 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-llx85" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.536992 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-fhhlw"] Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.546352 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9rm8b\" (UniqueName: \"kubernetes.io/projected/c85f6518-6046-441c-afae-4bd797cb807f-kube-api-access-9rm8b\") pod \"barbican-db-create-w9lth\" (UID: \"c85f6518-6046-441c-afae-4bd797cb807f\") " pod="openstack/barbican-db-create-w9lth" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.569834 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-cvwzv"] Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.571407 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-cvwzv" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.574895 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j7t5m\" (UniqueName: \"kubernetes.io/projected/866eeb81-622c-4ea2-a727-184e5e7d745c-kube-api-access-j7t5m\") pod \"cinder-5283-account-create-fhrq4\" (UID: \"866eeb81-622c-4ea2-a727-184e5e7d745c\") " pod="openstack/cinder-5283-account-create-fhrq4" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.584432 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-cvwzv"] Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.595145 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-w9lth" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.604915 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-5283-account-create-fhrq4" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.615956 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c3ad27b1-9595-4f03-b472-2dfdf7e70cc8-operator-scripts\") pod \"barbican-a00b-account-create-dmk77\" (UID: \"c3ad27b1-9595-4f03-b472-2dfdf7e70cc8\") " pod="openstack/barbican-a00b-account-create-dmk77" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.616010 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lzfr2\" (UniqueName: \"kubernetes.io/projected/ba05a62d-df27-4947-8b1a-bf6410f576a9-kube-api-access-lzfr2\") pod \"keystone-db-sync-fhhlw\" (UID: \"ba05a62d-df27-4947-8b1a-bf6410f576a9\") " pod="openstack/keystone-db-sync-fhhlw" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.616065 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba05a62d-df27-4947-8b1a-bf6410f576a9-combined-ca-bundle\") pod \"keystone-db-sync-fhhlw\" (UID: \"ba05a62d-df27-4947-8b1a-bf6410f576a9\") " pod="openstack/keystone-db-sync-fhhlw" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.616123 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba05a62d-df27-4947-8b1a-bf6410f576a9-config-data\") pod \"keystone-db-sync-fhhlw\" (UID: \"ba05a62d-df27-4947-8b1a-bf6410f576a9\") " pod="openstack/keystone-db-sync-fhhlw" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.616162 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-flfph\" (UniqueName: \"kubernetes.io/projected/c3ad27b1-9595-4f03-b472-2dfdf7e70cc8-kube-api-access-flfph\") pod \"barbican-a00b-account-create-dmk77\" (UID: \"c3ad27b1-9595-4f03-b472-2dfdf7e70cc8\") " pod="openstack/barbican-a00b-account-create-dmk77" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.619829 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c3ad27b1-9595-4f03-b472-2dfdf7e70cc8-operator-scripts\") pod \"barbican-a00b-account-create-dmk77\" (UID: \"c3ad27b1-9595-4f03-b472-2dfdf7e70cc8\") " pod="openstack/barbican-a00b-account-create-dmk77" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.648003 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-flfph\" (UniqueName: \"kubernetes.io/projected/c3ad27b1-9595-4f03-b472-2dfdf7e70cc8-kube-api-access-flfph\") pod \"barbican-a00b-account-create-dmk77\" (UID: \"c3ad27b1-9595-4f03-b472-2dfdf7e70cc8\") " pod="openstack/barbican-a00b-account-create-dmk77" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.667545 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-876a-account-create-45826"] Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.671487 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-876a-account-create-45826" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.673997 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.702035 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-876a-account-create-45826"] Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.728021 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-a00b-account-create-dmk77" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.748474 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a1b6e064-c1f0-4efd-9a54-5d1f7a4708b6-operator-scripts\") pod \"neutron-db-create-cvwzv\" (UID: \"a1b6e064-c1f0-4efd-9a54-5d1f7a4708b6\") " pod="openstack/neutron-db-create-cvwzv" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.748621 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lzfr2\" (UniqueName: \"kubernetes.io/projected/ba05a62d-df27-4947-8b1a-bf6410f576a9-kube-api-access-lzfr2\") pod \"keystone-db-sync-fhhlw\" (UID: \"ba05a62d-df27-4947-8b1a-bf6410f576a9\") " pod="openstack/keystone-db-sync-fhhlw" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.748658 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba05a62d-df27-4947-8b1a-bf6410f576a9-combined-ca-bundle\") pod \"keystone-db-sync-fhhlw\" (UID: \"ba05a62d-df27-4947-8b1a-bf6410f576a9\") " pod="openstack/keystone-db-sync-fhhlw" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.748700 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba05a62d-df27-4947-8b1a-bf6410f576a9-config-data\") pod \"keystone-db-sync-fhhlw\" (UID: \"ba05a62d-df27-4947-8b1a-bf6410f576a9\") " pod="openstack/keystone-db-sync-fhhlw" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.748727 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nbkrf\" (UniqueName: \"kubernetes.io/projected/a1b6e064-c1f0-4efd-9a54-5d1f7a4708b6-kube-api-access-nbkrf\") pod \"neutron-db-create-cvwzv\" (UID: \"a1b6e064-c1f0-4efd-9a54-5d1f7a4708b6\") " pod="openstack/neutron-db-create-cvwzv" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.761096 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba05a62d-df27-4947-8b1a-bf6410f576a9-config-data\") pod \"keystone-db-sync-fhhlw\" (UID: \"ba05a62d-df27-4947-8b1a-bf6410f576a9\") " pod="openstack/keystone-db-sync-fhhlw" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.770258 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba05a62d-df27-4947-8b1a-bf6410f576a9-combined-ca-bundle\") pod \"keystone-db-sync-fhhlw\" (UID: \"ba05a62d-df27-4947-8b1a-bf6410f576a9\") " pod="openstack/keystone-db-sync-fhhlw" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.783559 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lzfr2\" (UniqueName: \"kubernetes.io/projected/ba05a62d-df27-4947-8b1a-bf6410f576a9-kube-api-access-lzfr2\") pod 
\"keystone-db-sync-fhhlw\" (UID: \"ba05a62d-df27-4947-8b1a-bf6410f576a9\") " pod="openstack/keystone-db-sync-fhhlw" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.851905 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-whtxn\" (UniqueName: \"kubernetes.io/projected/281e34b7-f570-479f-ac9d-0e2b98d3f24f-kube-api-access-whtxn\") pod \"neutron-876a-account-create-45826\" (UID: \"281e34b7-f570-479f-ac9d-0e2b98d3f24f\") " pod="openstack/neutron-876a-account-create-45826" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.852562 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nbkrf\" (UniqueName: \"kubernetes.io/projected/a1b6e064-c1f0-4efd-9a54-5d1f7a4708b6-kube-api-access-nbkrf\") pod \"neutron-db-create-cvwzv\" (UID: \"a1b6e064-c1f0-4efd-9a54-5d1f7a4708b6\") " pod="openstack/neutron-db-create-cvwzv" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.852712 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a1b6e064-c1f0-4efd-9a54-5d1f7a4708b6-operator-scripts\") pod \"neutron-db-create-cvwzv\" (UID: \"a1b6e064-c1f0-4efd-9a54-5d1f7a4708b6\") " pod="openstack/neutron-db-create-cvwzv" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.852877 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/281e34b7-f570-479f-ac9d-0e2b98d3f24f-operator-scripts\") pod \"neutron-876a-account-create-45826\" (UID: \"281e34b7-f570-479f-ac9d-0e2b98d3f24f\") " pod="openstack/neutron-876a-account-create-45826" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.854205 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a1b6e064-c1f0-4efd-9a54-5d1f7a4708b6-operator-scripts\") pod \"neutron-db-create-cvwzv\" (UID: \"a1b6e064-c1f0-4efd-9a54-5d1f7a4708b6\") " pod="openstack/neutron-db-create-cvwzv" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.882161 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-7tzgl-config-7cbwn" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.908256 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nbkrf\" (UniqueName: \"kubernetes.io/projected/a1b6e064-c1f0-4efd-9a54-5d1f7a4708b6-kube-api-access-nbkrf\") pod \"neutron-db-create-cvwzv\" (UID: \"a1b6e064-c1f0-4efd-9a54-5d1f7a4708b6\") " pod="openstack/neutron-db-create-cvwzv" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.955390 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-whtxn\" (UniqueName: \"kubernetes.io/projected/281e34b7-f570-479f-ac9d-0e2b98d3f24f-kube-api-access-whtxn\") pod \"neutron-876a-account-create-45826\" (UID: \"281e34b7-f570-479f-ac9d-0e2b98d3f24f\") " pod="openstack/neutron-876a-account-create-45826" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.955530 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/281e34b7-f570-479f-ac9d-0e2b98d3f24f-operator-scripts\") pod \"neutron-876a-account-create-45826\" (UID: \"281e34b7-f570-479f-ac9d-0e2b98d3f24f\") " pod="openstack/neutron-876a-account-create-45826" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.956432 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/281e34b7-f570-479f-ac9d-0e2b98d3f24f-operator-scripts\") pod \"neutron-876a-account-create-45826\" (UID: \"281e34b7-f570-479f-ac9d-0e2b98d3f24f\") " pod="openstack/neutron-876a-account-create-45826" Nov 24 01:28:15 crc kubenswrapper[4755]: I1124 01:28:15.976385 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-whtxn\" (UniqueName: \"kubernetes.io/projected/281e34b7-f570-479f-ac9d-0e2b98d3f24f-kube-api-access-whtxn\") pod \"neutron-876a-account-create-45826\" (UID: \"281e34b7-f570-479f-ac9d-0e2b98d3f24f\") " pod="openstack/neutron-876a-account-create-45826" Nov 24 01:28:16 crc kubenswrapper[4755]: I1124 01:28:16.006166 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-fhhlw" Nov 24 01:28:16 crc kubenswrapper[4755]: I1124 01:28:16.022035 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-cvwzv" Nov 24 01:28:16 crc kubenswrapper[4755]: I1124 01:28:16.043270 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-876a-account-create-45826" Nov 24 01:28:16 crc kubenswrapper[4755]: I1124 01:28:16.057279 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cjdmr\" (UniqueName: \"kubernetes.io/projected/ee2730ba-9a5e-48eb-be37-0cc250092218-kube-api-access-cjdmr\") pod \"ee2730ba-9a5e-48eb-be37-0cc250092218\" (UID: \"ee2730ba-9a5e-48eb-be37-0cc250092218\") " Nov 24 01:28:16 crc kubenswrapper[4755]: I1124 01:28:16.057774 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ee2730ba-9a5e-48eb-be37-0cc250092218-var-run-ovn\") pod \"ee2730ba-9a5e-48eb-be37-0cc250092218\" (UID: \"ee2730ba-9a5e-48eb-be37-0cc250092218\") " Nov 24 01:28:16 crc kubenswrapper[4755]: I1124 01:28:16.057844 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ee2730ba-9a5e-48eb-be37-0cc250092218-var-log-ovn\") pod \"ee2730ba-9a5e-48eb-be37-0cc250092218\" (UID: \"ee2730ba-9a5e-48eb-be37-0cc250092218\") " Nov 24 01:28:16 crc kubenswrapper[4755]: I1124 01:28:16.057911 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ee2730ba-9a5e-48eb-be37-0cc250092218-scripts\") pod \"ee2730ba-9a5e-48eb-be37-0cc250092218\" (UID: \"ee2730ba-9a5e-48eb-be37-0cc250092218\") " Nov 24 01:28:16 crc kubenswrapper[4755]: I1124 01:28:16.058083 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ee2730ba-9a5e-48eb-be37-0cc250092218-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "ee2730ba-9a5e-48eb-be37-0cc250092218" (UID: "ee2730ba-9a5e-48eb-be37-0cc250092218"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 01:28:16 crc kubenswrapper[4755]: I1124 01:28:16.058149 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ee2730ba-9a5e-48eb-be37-0cc250092218-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "ee2730ba-9a5e-48eb-be37-0cc250092218" (UID: "ee2730ba-9a5e-48eb-be37-0cc250092218"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 01:28:16 crc kubenswrapper[4755]: I1124 01:28:16.058849 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ee2730ba-9a5e-48eb-be37-0cc250092218-var-run\") pod \"ee2730ba-9a5e-48eb-be37-0cc250092218\" (UID: \"ee2730ba-9a5e-48eb-be37-0cc250092218\") " Nov 24 01:28:16 crc kubenswrapper[4755]: I1124 01:28:16.058926 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ee2730ba-9a5e-48eb-be37-0cc250092218-additional-scripts\") pod \"ee2730ba-9a5e-48eb-be37-0cc250092218\" (UID: \"ee2730ba-9a5e-48eb-be37-0cc250092218\") " Nov 24 01:28:16 crc kubenswrapper[4755]: I1124 01:28:16.059473 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ee2730ba-9a5e-48eb-be37-0cc250092218-scripts" (OuterVolumeSpecName: "scripts") pod "ee2730ba-9a5e-48eb-be37-0cc250092218" (UID: "ee2730ba-9a5e-48eb-be37-0cc250092218"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:28:16 crc kubenswrapper[4755]: I1124 01:28:16.059691 4755 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ee2730ba-9a5e-48eb-be37-0cc250092218-var-run-ovn\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:16 crc kubenswrapper[4755]: I1124 01:28:16.059709 4755 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ee2730ba-9a5e-48eb-be37-0cc250092218-var-log-ovn\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:16 crc kubenswrapper[4755]: I1124 01:28:16.060077 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ee2730ba-9a5e-48eb-be37-0cc250092218-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:16 crc kubenswrapper[4755]: I1124 01:28:16.059965 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ee2730ba-9a5e-48eb-be37-0cc250092218-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "ee2730ba-9a5e-48eb-be37-0cc250092218" (UID: "ee2730ba-9a5e-48eb-be37-0cc250092218"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:28:16 crc kubenswrapper[4755]: I1124 01:28:16.059989 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ee2730ba-9a5e-48eb-be37-0cc250092218-var-run" (OuterVolumeSpecName: "var-run") pod "ee2730ba-9a5e-48eb-be37-0cc250092218" (UID: "ee2730ba-9a5e-48eb-be37-0cc250092218"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 01:28:16 crc kubenswrapper[4755]: I1124 01:28:16.073167 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee2730ba-9a5e-48eb-be37-0cc250092218-kube-api-access-cjdmr" (OuterVolumeSpecName: "kube-api-access-cjdmr") pod "ee2730ba-9a5e-48eb-be37-0cc250092218" (UID: "ee2730ba-9a5e-48eb-be37-0cc250092218"). InnerVolumeSpecName "kube-api-access-cjdmr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:28:16 crc kubenswrapper[4755]: I1124 01:28:16.162405 4755 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ee2730ba-9a5e-48eb-be37-0cc250092218-var-run\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:16 crc kubenswrapper[4755]: I1124 01:28:16.162434 4755 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ee2730ba-9a5e-48eb-be37-0cc250092218-additional-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:16 crc kubenswrapper[4755]: I1124 01:28:16.162445 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cjdmr\" (UniqueName: \"kubernetes.io/projected/ee2730ba-9a5e-48eb-be37-0cc250092218-kube-api-access-cjdmr\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:16 crc kubenswrapper[4755]: I1124 01:28:16.216439 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-k42zb"] Nov 24 01:28:16 crc kubenswrapper[4755]: I1124 01:28:16.403049 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-5283-account-create-fhrq4"] Nov 24 01:28:16 crc kubenswrapper[4755]: I1124 01:28:16.512187 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-5283-account-create-fhrq4" event={"ID":"866eeb81-622c-4ea2-a727-184e5e7d745c","Type":"ContainerStarted","Data":"dcffe0f88f409241794937490be430d87ecae82bcfedf9112a1478ecb5890567"} Nov 24 01:28:16 crc kubenswrapper[4755]: I1124 01:28:16.538573 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-k42zb" event={"ID":"d8b666dc-55f4-4e9f-9768-147815005e1e","Type":"ContainerStarted","Data":"631b6c9e5863be7e12b9fa348a729d94f57a54ba7c343b25bcaadf1839ac6f8c"} Nov 24 01:28:16 crc kubenswrapper[4755]: I1124 01:28:16.538630 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-k42zb" event={"ID":"d8b666dc-55f4-4e9f-9768-147815005e1e","Type":"ContainerStarted","Data":"b162a0f8c1afbb1eddc0de43a20ab260652d6d2f2447dda8267af1785dfcf0d7"} Nov 24 01:28:16 crc kubenswrapper[4755]: I1124 01:28:16.547333 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-7tzgl-config-7cbwn" event={"ID":"ee2730ba-9a5e-48eb-be37-0cc250092218","Type":"ContainerDied","Data":"dd8aa7e63910cadfc98a9cddc65d7096fb47da7b26b9237a4f4a7bd4cc396ad6"} Nov 24 01:28:16 crc kubenswrapper[4755]: I1124 01:28:16.547371 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dd8aa7e63910cadfc98a9cddc65d7096fb47da7b26b9237a4f4a7bd4cc396ad6" Nov 24 01:28:16 crc kubenswrapper[4755]: I1124 01:28:16.547430 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-7tzgl-config-7cbwn" Nov 24 01:28:16 crc kubenswrapper[4755]: I1124 01:28:16.571901 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-w9lth"] Nov 24 01:28:16 crc kubenswrapper[4755]: I1124 01:28:16.579815 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-a00b-account-create-dmk77"] Nov 24 01:28:16 crc kubenswrapper[4755]: W1124 01:28:16.580370 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc3ad27b1_9595_4f03_b472_2dfdf7e70cc8.slice/crio-8b63344abd5ebc525463b96c9a113a242d579e92a3017602c8357198b91b7479 WatchSource:0}: Error finding container 8b63344abd5ebc525463b96c9a113a242d579e92a3017602c8357198b91b7479: Status 404 returned error can't find the container with id 8b63344abd5ebc525463b96c9a113a242d579e92a3017602c8357198b91b7479 Nov 24 01:28:16 crc kubenswrapper[4755]: I1124 01:28:16.583927 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-create-k42zb" podStartSLOduration=1.5839136919999999 podStartE2EDuration="1.583913692s" podCreationTimestamp="2025-11-24 01:28:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:28:16.565356699 +0000 UTC m=+921.251422200" watchObservedRunningTime="2025-11-24 01:28:16.583913692 +0000 UTC m=+921.269979183" Nov 24 01:28:16 crc kubenswrapper[4755]: I1124 01:28:16.663967 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-fhhlw"] Nov 24 01:28:16 crc kubenswrapper[4755]: W1124 01:28:16.674001 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podba05a62d_df27_4947_8b1a_bf6410f576a9.slice/crio-c67c3332a4709f0e858aea0c0ecd97a8d74be9d2dd1bdd980f4787c5f6dff7c6 WatchSource:0}: Error finding container c67c3332a4709f0e858aea0c0ecd97a8d74be9d2dd1bdd980f4787c5f6dff7c6: Status 404 returned error can't find the container with id c67c3332a4709f0e858aea0c0ecd97a8d74be9d2dd1bdd980f4787c5f6dff7c6 Nov 24 01:28:16 crc kubenswrapper[4755]: I1124 01:28:16.695233 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-cvwzv"] Nov 24 01:28:16 crc kubenswrapper[4755]: W1124 01:28:16.768453 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda1b6e064_c1f0_4efd_9a54_5d1f7a4708b6.slice/crio-e72330dfac04893f43ad89649fc2db6df1a9f87a38c322c0201379fa8cea1554 WatchSource:0}: Error finding container e72330dfac04893f43ad89649fc2db6df1a9f87a38c322c0201379fa8cea1554: Status 404 returned error can't find the container with id e72330dfac04893f43ad89649fc2db6df1a9f87a38c322c0201379fa8cea1554 Nov 24 01:28:16 crc kubenswrapper[4755]: I1124 01:28:16.795286 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-876a-account-create-45826"] Nov 24 01:28:17 crc kubenswrapper[4755]: I1124 01:28:17.002966 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-7tzgl-config-7cbwn"] Nov 24 01:28:17 crc kubenswrapper[4755]: I1124 01:28:17.014951 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-7tzgl-config-7cbwn"] Nov 24 01:28:17 crc kubenswrapper[4755]: I1124 01:28:17.559878 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/keystone-db-sync-fhhlw" event={"ID":"ba05a62d-df27-4947-8b1a-bf6410f576a9","Type":"ContainerStarted","Data":"c67c3332a4709f0e858aea0c0ecd97a8d74be9d2dd1bdd980f4787c5f6dff7c6"} Nov 24 01:28:17 crc kubenswrapper[4755]: I1124 01:28:17.562019 4755 generic.go:334] "Generic (PLEG): container finished" podID="c3ad27b1-9595-4f03-b472-2dfdf7e70cc8" containerID="a3363271518c94ebc7932b074033ddf197971f75fd4a1b47f2b54f0ded3e7475" exitCode=0 Nov 24 01:28:17 crc kubenswrapper[4755]: I1124 01:28:17.562091 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-a00b-account-create-dmk77" event={"ID":"c3ad27b1-9595-4f03-b472-2dfdf7e70cc8","Type":"ContainerDied","Data":"a3363271518c94ebc7932b074033ddf197971f75fd4a1b47f2b54f0ded3e7475"} Nov 24 01:28:17 crc kubenswrapper[4755]: I1124 01:28:17.562120 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-a00b-account-create-dmk77" event={"ID":"c3ad27b1-9595-4f03-b472-2dfdf7e70cc8","Type":"ContainerStarted","Data":"8b63344abd5ebc525463b96c9a113a242d579e92a3017602c8357198b91b7479"} Nov 24 01:28:17 crc kubenswrapper[4755]: I1124 01:28:17.564895 4755 generic.go:334] "Generic (PLEG): container finished" podID="866eeb81-622c-4ea2-a727-184e5e7d745c" containerID="7f947942acf76534330daf32a36284acadfad746d2fb9a8bc8bb97f23bdb8da5" exitCode=0 Nov 24 01:28:17 crc kubenswrapper[4755]: I1124 01:28:17.565019 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-5283-account-create-fhrq4" event={"ID":"866eeb81-622c-4ea2-a727-184e5e7d745c","Type":"ContainerDied","Data":"7f947942acf76534330daf32a36284acadfad746d2fb9a8bc8bb97f23bdb8da5"} Nov 24 01:28:17 crc kubenswrapper[4755]: I1124 01:28:17.566695 4755 generic.go:334] "Generic (PLEG): container finished" podID="d8b666dc-55f4-4e9f-9768-147815005e1e" containerID="631b6c9e5863be7e12b9fa348a729d94f57a54ba7c343b25bcaadf1839ac6f8c" exitCode=0 Nov 24 01:28:17 crc kubenswrapper[4755]: I1124 01:28:17.566760 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-k42zb" event={"ID":"d8b666dc-55f4-4e9f-9768-147815005e1e","Type":"ContainerDied","Data":"631b6c9e5863be7e12b9fa348a729d94f57a54ba7c343b25bcaadf1839ac6f8c"} Nov 24 01:28:17 crc kubenswrapper[4755]: I1124 01:28:17.568897 4755 generic.go:334] "Generic (PLEG): container finished" podID="281e34b7-f570-479f-ac9d-0e2b98d3f24f" containerID="03fd48e4131bf585cbc0f7d53823d3623f20dd4ee33ec85fde122a7c9ebaed70" exitCode=0 Nov 24 01:28:17 crc kubenswrapper[4755]: I1124 01:28:17.568945 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-876a-account-create-45826" event={"ID":"281e34b7-f570-479f-ac9d-0e2b98d3f24f","Type":"ContainerDied","Data":"03fd48e4131bf585cbc0f7d53823d3623f20dd4ee33ec85fde122a7c9ebaed70"} Nov 24 01:28:17 crc kubenswrapper[4755]: I1124 01:28:17.568966 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-876a-account-create-45826" event={"ID":"281e34b7-f570-479f-ac9d-0e2b98d3f24f","Type":"ContainerStarted","Data":"42dffa16f19cd58ddf86630e582716e8c1efb498f963c13dc8b85ffdbb3d3ef0"} Nov 24 01:28:17 crc kubenswrapper[4755]: I1124 01:28:17.570258 4755 generic.go:334] "Generic (PLEG): container finished" podID="c85f6518-6046-441c-afae-4bd797cb807f" containerID="95f68ef420aa4049198a419a3c649b87da4bbd4e0086d4b8fee3caae07daefdc" exitCode=0 Nov 24 01:28:17 crc kubenswrapper[4755]: I1124 01:28:17.570305 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-w9lth" 
event={"ID":"c85f6518-6046-441c-afae-4bd797cb807f","Type":"ContainerDied","Data":"95f68ef420aa4049198a419a3c649b87da4bbd4e0086d4b8fee3caae07daefdc"} Nov 24 01:28:17 crc kubenswrapper[4755]: I1124 01:28:17.570324 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-w9lth" event={"ID":"c85f6518-6046-441c-afae-4bd797cb807f","Type":"ContainerStarted","Data":"1d859dc1cd45f6f094f4d14fac69e76faac2b669bc67954b8a6b5894249802e4"} Nov 24 01:28:17 crc kubenswrapper[4755]: I1124 01:28:17.571549 4755 generic.go:334] "Generic (PLEG): container finished" podID="a1b6e064-c1f0-4efd-9a54-5d1f7a4708b6" containerID="70869526a2f3d6dcdf3530833b5417a288777956a15dc904555b139da5d304a9" exitCode=0 Nov 24 01:28:17 crc kubenswrapper[4755]: I1124 01:28:17.571580 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-cvwzv" event={"ID":"a1b6e064-c1f0-4efd-9a54-5d1f7a4708b6","Type":"ContainerDied","Data":"70869526a2f3d6dcdf3530833b5417a288777956a15dc904555b139da5d304a9"} Nov 24 01:28:17 crc kubenswrapper[4755]: I1124 01:28:17.571612 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-cvwzv" event={"ID":"a1b6e064-c1f0-4efd-9a54-5d1f7a4708b6","Type":"ContainerStarted","Data":"e72330dfac04893f43ad89649fc2db6df1a9f87a38c322c0201379fa8cea1554"} Nov 24 01:28:18 crc kubenswrapper[4755]: I1124 01:28:18.019470 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee2730ba-9a5e-48eb-be37-0cc250092218" path="/var/lib/kubelet/pods/ee2730ba-9a5e-48eb-be37-0cc250092218/volumes" Nov 24 01:28:18 crc kubenswrapper[4755]: I1124 01:28:18.934743 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-876a-account-create-45826" Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.062098 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/281e34b7-f570-479f-ac9d-0e2b98d3f24f-operator-scripts\") pod \"281e34b7-f570-479f-ac9d-0e2b98d3f24f\" (UID: \"281e34b7-f570-479f-ac9d-0e2b98d3f24f\") " Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.062172 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-whtxn\" (UniqueName: \"kubernetes.io/projected/281e34b7-f570-479f-ac9d-0e2b98d3f24f-kube-api-access-whtxn\") pod \"281e34b7-f570-479f-ac9d-0e2b98d3f24f\" (UID: \"281e34b7-f570-479f-ac9d-0e2b98d3f24f\") " Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.062949 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/281e34b7-f570-479f-ac9d-0e2b98d3f24f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "281e34b7-f570-479f-ac9d-0e2b98d3f24f" (UID: "281e34b7-f570-479f-ac9d-0e2b98d3f24f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.070545 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/281e34b7-f570-479f-ac9d-0e2b98d3f24f-kube-api-access-whtxn" (OuterVolumeSpecName: "kube-api-access-whtxn") pod "281e34b7-f570-479f-ac9d-0e2b98d3f24f" (UID: "281e34b7-f570-479f-ac9d-0e2b98d3f24f"). InnerVolumeSpecName "kube-api-access-whtxn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.143462 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-a00b-account-create-dmk77" Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.149709 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-cvwzv" Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.162728 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-k42zb" Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.163670 4755 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/281e34b7-f570-479f-ac9d-0e2b98d3f24f-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.163697 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-whtxn\" (UniqueName: \"kubernetes.io/projected/281e34b7-f570-479f-ac9d-0e2b98d3f24f-kube-api-access-whtxn\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.164353 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-5283-account-create-fhrq4" Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.175788 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-w9lth" Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.265026 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a1b6e064-c1f0-4efd-9a54-5d1f7a4708b6-operator-scripts\") pod \"a1b6e064-c1f0-4efd-9a54-5d1f7a4708b6\" (UID: \"a1b6e064-c1f0-4efd-9a54-5d1f7a4708b6\") " Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.265103 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d8b666dc-55f4-4e9f-9768-147815005e1e-operator-scripts\") pod \"d8b666dc-55f4-4e9f-9768-147815005e1e\" (UID: \"d8b666dc-55f4-4e9f-9768-147815005e1e\") " Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.265148 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nbkrf\" (UniqueName: \"kubernetes.io/projected/a1b6e064-c1f0-4efd-9a54-5d1f7a4708b6-kube-api-access-nbkrf\") pod \"a1b6e064-c1f0-4efd-9a54-5d1f7a4708b6\" (UID: \"a1b6e064-c1f0-4efd-9a54-5d1f7a4708b6\") " Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.265208 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j7t5m\" (UniqueName: \"kubernetes.io/projected/866eeb81-622c-4ea2-a727-184e5e7d745c-kube-api-access-j7t5m\") pod \"866eeb81-622c-4ea2-a727-184e5e7d745c\" (UID: \"866eeb81-622c-4ea2-a727-184e5e7d745c\") " Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.265271 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4xrj5\" (UniqueName: \"kubernetes.io/projected/d8b666dc-55f4-4e9f-9768-147815005e1e-kube-api-access-4xrj5\") pod \"d8b666dc-55f4-4e9f-9768-147815005e1e\" (UID: \"d8b666dc-55f4-4e9f-9768-147815005e1e\") " Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.265360 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-flfph\" (UniqueName: \"kubernetes.io/projected/c3ad27b1-9595-4f03-b472-2dfdf7e70cc8-kube-api-access-flfph\") pod \"c3ad27b1-9595-4f03-b472-2dfdf7e70cc8\" (UID: 
\"c3ad27b1-9595-4f03-b472-2dfdf7e70cc8\") " Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.265384 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/866eeb81-622c-4ea2-a727-184e5e7d745c-operator-scripts\") pod \"866eeb81-622c-4ea2-a727-184e5e7d745c\" (UID: \"866eeb81-622c-4ea2-a727-184e5e7d745c\") " Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.265424 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c3ad27b1-9595-4f03-b472-2dfdf7e70cc8-operator-scripts\") pod \"c3ad27b1-9595-4f03-b472-2dfdf7e70cc8\" (UID: \"c3ad27b1-9595-4f03-b472-2dfdf7e70cc8\") " Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.265457 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a1b6e064-c1f0-4efd-9a54-5d1f7a4708b6-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a1b6e064-c1f0-4efd-9a54-5d1f7a4708b6" (UID: "a1b6e064-c1f0-4efd-9a54-5d1f7a4708b6"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.265819 4755 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a1b6e064-c1f0-4efd-9a54-5d1f7a4708b6-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.266203 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c3ad27b1-9595-4f03-b472-2dfdf7e70cc8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c3ad27b1-9595-4f03-b472-2dfdf7e70cc8" (UID: "c3ad27b1-9595-4f03-b472-2dfdf7e70cc8"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.266223 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d8b666dc-55f4-4e9f-9768-147815005e1e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d8b666dc-55f4-4e9f-9768-147815005e1e" (UID: "d8b666dc-55f4-4e9f-9768-147815005e1e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.266435 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/866eeb81-622c-4ea2-a727-184e5e7d745c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "866eeb81-622c-4ea2-a727-184e5e7d745c" (UID: "866eeb81-622c-4ea2-a727-184e5e7d745c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.269725 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/866eeb81-622c-4ea2-a727-184e5e7d745c-kube-api-access-j7t5m" (OuterVolumeSpecName: "kube-api-access-j7t5m") pod "866eeb81-622c-4ea2-a727-184e5e7d745c" (UID: "866eeb81-622c-4ea2-a727-184e5e7d745c"). InnerVolumeSpecName "kube-api-access-j7t5m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.269764 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8b666dc-55f4-4e9f-9768-147815005e1e-kube-api-access-4xrj5" (OuterVolumeSpecName: "kube-api-access-4xrj5") pod "d8b666dc-55f4-4e9f-9768-147815005e1e" (UID: "d8b666dc-55f4-4e9f-9768-147815005e1e"). InnerVolumeSpecName "kube-api-access-4xrj5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.270169 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1b6e064-c1f0-4efd-9a54-5d1f7a4708b6-kube-api-access-nbkrf" (OuterVolumeSpecName: "kube-api-access-nbkrf") pod "a1b6e064-c1f0-4efd-9a54-5d1f7a4708b6" (UID: "a1b6e064-c1f0-4efd-9a54-5d1f7a4708b6"). InnerVolumeSpecName "kube-api-access-nbkrf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.270287 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c3ad27b1-9595-4f03-b472-2dfdf7e70cc8-kube-api-access-flfph" (OuterVolumeSpecName: "kube-api-access-flfph") pod "c3ad27b1-9595-4f03-b472-2dfdf7e70cc8" (UID: "c3ad27b1-9595-4f03-b472-2dfdf7e70cc8"). InnerVolumeSpecName "kube-api-access-flfph". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.366371 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9rm8b\" (UniqueName: \"kubernetes.io/projected/c85f6518-6046-441c-afae-4bd797cb807f-kube-api-access-9rm8b\") pod \"c85f6518-6046-441c-afae-4bd797cb807f\" (UID: \"c85f6518-6046-441c-afae-4bd797cb807f\") " Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.366751 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c85f6518-6046-441c-afae-4bd797cb807f-operator-scripts\") pod \"c85f6518-6046-441c-afae-4bd797cb807f\" (UID: \"c85f6518-6046-441c-afae-4bd797cb807f\") " Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.367062 4755 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d8b666dc-55f4-4e9f-9768-147815005e1e-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.367075 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nbkrf\" (UniqueName: \"kubernetes.io/projected/a1b6e064-c1f0-4efd-9a54-5d1f7a4708b6-kube-api-access-nbkrf\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.367084 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j7t5m\" (UniqueName: \"kubernetes.io/projected/866eeb81-622c-4ea2-a727-184e5e7d745c-kube-api-access-j7t5m\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.367093 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4xrj5\" (UniqueName: \"kubernetes.io/projected/d8b666dc-55f4-4e9f-9768-147815005e1e-kube-api-access-4xrj5\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.367101 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-flfph\" (UniqueName: \"kubernetes.io/projected/c3ad27b1-9595-4f03-b472-2dfdf7e70cc8-kube-api-access-flfph\") on node \"crc\" 
DevicePath \"\"" Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.367109 4755 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/866eeb81-622c-4ea2-a727-184e5e7d745c-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.367117 4755 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c3ad27b1-9595-4f03-b472-2dfdf7e70cc8-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.367148 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c85f6518-6046-441c-afae-4bd797cb807f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c85f6518-6046-441c-afae-4bd797cb807f" (UID: "c85f6518-6046-441c-afae-4bd797cb807f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.369289 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c85f6518-6046-441c-afae-4bd797cb807f-kube-api-access-9rm8b" (OuterVolumeSpecName: "kube-api-access-9rm8b") pod "c85f6518-6046-441c-afae-4bd797cb807f" (UID: "c85f6518-6046-441c-afae-4bd797cb807f"). InnerVolumeSpecName "kube-api-access-9rm8b". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.468858 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9rm8b\" (UniqueName: \"kubernetes.io/projected/c85f6518-6046-441c-afae-4bd797cb807f-kube-api-access-9rm8b\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.468893 4755 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c85f6518-6046-441c-afae-4bd797cb807f-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.593121 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-a00b-account-create-dmk77" Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.593842 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-a00b-account-create-dmk77" event={"ID":"c3ad27b1-9595-4f03-b472-2dfdf7e70cc8","Type":"ContainerDied","Data":"8b63344abd5ebc525463b96c9a113a242d579e92a3017602c8357198b91b7479"} Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.593886 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8b63344abd5ebc525463b96c9a113a242d579e92a3017602c8357198b91b7479" Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.597279 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-5283-account-create-fhrq4" event={"ID":"866eeb81-622c-4ea2-a727-184e5e7d745c","Type":"ContainerDied","Data":"dcffe0f88f409241794937490be430d87ecae82bcfedf9112a1478ecb5890567"} Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.597308 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dcffe0f88f409241794937490be430d87ecae82bcfedf9112a1478ecb5890567" Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.597356 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-5283-account-create-fhrq4" Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.604747 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-k42zb" Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.605806 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-k42zb" event={"ID":"d8b666dc-55f4-4e9f-9768-147815005e1e","Type":"ContainerDied","Data":"b162a0f8c1afbb1eddc0de43a20ab260652d6d2f2447dda8267af1785dfcf0d7"} Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.605857 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b162a0f8c1afbb1eddc0de43a20ab260652d6d2f2447dda8267af1785dfcf0d7" Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.612233 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-876a-account-create-45826" Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.612461 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-876a-account-create-45826" event={"ID":"281e34b7-f570-479f-ac9d-0e2b98d3f24f","Type":"ContainerDied","Data":"42dffa16f19cd58ddf86630e582716e8c1efb498f963c13dc8b85ffdbb3d3ef0"} Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.612516 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="42dffa16f19cd58ddf86630e582716e8c1efb498f963c13dc8b85ffdbb3d3ef0" Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.621907 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-w9lth" event={"ID":"c85f6518-6046-441c-afae-4bd797cb807f","Type":"ContainerDied","Data":"1d859dc1cd45f6f094f4d14fac69e76faac2b669bc67954b8a6b5894249802e4"} Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.621946 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1d859dc1cd45f6f094f4d14fac69e76faac2b669bc67954b8a6b5894249802e4" Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.622001 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-w9lth" Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.624592 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-cvwzv" event={"ID":"a1b6e064-c1f0-4efd-9a54-5d1f7a4708b6","Type":"ContainerDied","Data":"e72330dfac04893f43ad89649fc2db6df1a9f87a38c322c0201379fa8cea1554"} Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.624655 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e72330dfac04893f43ad89649fc2db6df1a9f87a38c322c0201379fa8cea1554" Nov 24 01:28:19 crc kubenswrapper[4755]: I1124 01:28:19.624721 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-cvwzv" Nov 24 01:28:20 crc kubenswrapper[4755]: I1124 01:28:20.115789 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-77585f5f8c-x9ksn" Nov 24 01:28:20 crc kubenswrapper[4755]: I1124 01:28:20.180736 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-7gzpp"] Nov 24 01:28:20 crc kubenswrapper[4755]: I1124 01:28:20.181010 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-698758b865-7gzpp" podUID="a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c" containerName="dnsmasq-dns" containerID="cri-o://23452913ad37758af62107f2d6c27c168db80a2d9d06a76d76fc10c30136ab8b" gracePeriod=10 Nov 24 01:28:20 crc kubenswrapper[4755]: I1124 01:28:20.580188 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-7gzpp" podUID="a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.114:5353: connect: connection refused" Nov 24 01:28:20 crc kubenswrapper[4755]: I1124 01:28:20.634663 4755 generic.go:334] "Generic (PLEG): container finished" podID="6dee4f3a-cb77-4137-b459-9c1be1a005ef" containerID="f59b4c5cd502b8565f2159fdb02163c0d0fd6805f2b214da42cfaa7873855440" exitCode=0 Nov 24 01:28:20 crc kubenswrapper[4755]: I1124 01:28:20.634847 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-vtf8f" event={"ID":"6dee4f3a-cb77-4137-b459-9c1be1a005ef","Type":"ContainerDied","Data":"f59b4c5cd502b8565f2159fdb02163c0d0fd6805f2b214da42cfaa7873855440"} Nov 24 01:28:20 crc kubenswrapper[4755]: I1124 01:28:20.638198 4755 generic.go:334] "Generic (PLEG): container finished" podID="a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c" containerID="23452913ad37758af62107f2d6c27c168db80a2d9d06a76d76fc10c30136ab8b" exitCode=0 Nov 24 01:28:20 crc kubenswrapper[4755]: I1124 01:28:20.638245 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-7gzpp" event={"ID":"a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c","Type":"ContainerDied","Data":"23452913ad37758af62107f2d6c27c168db80a2d9d06a76d76fc10c30136ab8b"} Nov 24 01:28:22 crc kubenswrapper[4755]: I1124 01:28:22.114464 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-vtf8f" Nov 24 01:28:22 crc kubenswrapper[4755]: I1124 01:28:22.190661 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-7gzpp" Nov 24 01:28:22 crc kubenswrapper[4755]: I1124 01:28:22.210843 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6dee4f3a-cb77-4137-b459-9c1be1a005ef-config-data\") pod \"6dee4f3a-cb77-4137-b459-9c1be1a005ef\" (UID: \"6dee4f3a-cb77-4137-b459-9c1be1a005ef\") " Nov 24 01:28:22 crc kubenswrapper[4755]: I1124 01:28:22.210939 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dee4f3a-cb77-4137-b459-9c1be1a005ef-combined-ca-bundle\") pod \"6dee4f3a-cb77-4137-b459-9c1be1a005ef\" (UID: \"6dee4f3a-cb77-4137-b459-9c1be1a005ef\") " Nov 24 01:28:22 crc kubenswrapper[4755]: I1124 01:28:22.211050 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6dee4f3a-cb77-4137-b459-9c1be1a005ef-db-sync-config-data\") pod \"6dee4f3a-cb77-4137-b459-9c1be1a005ef\" (UID: \"6dee4f3a-cb77-4137-b459-9c1be1a005ef\") " Nov 24 01:28:22 crc kubenswrapper[4755]: I1124 01:28:22.211133 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hjk7d\" (UniqueName: \"kubernetes.io/projected/6dee4f3a-cb77-4137-b459-9c1be1a005ef-kube-api-access-hjk7d\") pod \"6dee4f3a-cb77-4137-b459-9c1be1a005ef\" (UID: \"6dee4f3a-cb77-4137-b459-9c1be1a005ef\") " Nov 24 01:28:22 crc kubenswrapper[4755]: I1124 01:28:22.250812 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6dee4f3a-cb77-4137-b459-9c1be1a005ef-kube-api-access-hjk7d" (OuterVolumeSpecName: "kube-api-access-hjk7d") pod "6dee4f3a-cb77-4137-b459-9c1be1a005ef" (UID: "6dee4f3a-cb77-4137-b459-9c1be1a005ef"). InnerVolumeSpecName "kube-api-access-hjk7d". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:28:22 crc kubenswrapper[4755]: I1124 01:28:22.250962 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6dee4f3a-cb77-4137-b459-9c1be1a005ef-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "6dee4f3a-cb77-4137-b459-9c1be1a005ef" (UID: "6dee4f3a-cb77-4137-b459-9c1be1a005ef"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:28:22 crc kubenswrapper[4755]: I1124 01:28:22.254461 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6dee4f3a-cb77-4137-b459-9c1be1a005ef-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6dee4f3a-cb77-4137-b459-9c1be1a005ef" (UID: "6dee4f3a-cb77-4137-b459-9c1be1a005ef"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:28:22 crc kubenswrapper[4755]: I1124 01:28:22.277579 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6dee4f3a-cb77-4137-b459-9c1be1a005ef-config-data" (OuterVolumeSpecName: "config-data") pod "6dee4f3a-cb77-4137-b459-9c1be1a005ef" (UID: "6dee4f3a-cb77-4137-b459-9c1be1a005ef"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:28:22 crc kubenswrapper[4755]: I1124 01:28:22.312169 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c-config\") pod \"a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c\" (UID: \"a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c\") " Nov 24 01:28:22 crc kubenswrapper[4755]: I1124 01:28:22.312209 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c-ovsdbserver-nb\") pod \"a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c\" (UID: \"a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c\") " Nov 24 01:28:22 crc kubenswrapper[4755]: I1124 01:28:22.312248 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c-dns-svc\") pod \"a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c\" (UID: \"a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c\") " Nov 24 01:28:22 crc kubenswrapper[4755]: I1124 01:28:22.312344 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c-ovsdbserver-sb\") pod \"a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c\" (UID: \"a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c\") " Nov 24 01:28:22 crc kubenswrapper[4755]: I1124 01:28:22.312427 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k678p\" (UniqueName: \"kubernetes.io/projected/a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c-kube-api-access-k678p\") pod \"a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c\" (UID: \"a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c\") " Nov 24 01:28:22 crc kubenswrapper[4755]: I1124 01:28:22.312713 4755 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6dee4f3a-cb77-4137-b459-9c1be1a005ef-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:22 crc kubenswrapper[4755]: I1124 01:28:22.312729 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hjk7d\" (UniqueName: \"kubernetes.io/projected/6dee4f3a-cb77-4137-b459-9c1be1a005ef-kube-api-access-hjk7d\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:22 crc kubenswrapper[4755]: I1124 01:28:22.312740 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6dee4f3a-cb77-4137-b459-9c1be1a005ef-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:22 crc kubenswrapper[4755]: I1124 01:28:22.312747 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dee4f3a-cb77-4137-b459-9c1be1a005ef-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:22 crc kubenswrapper[4755]: I1124 01:28:22.316755 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c-kube-api-access-k678p" (OuterVolumeSpecName: "kube-api-access-k678p") pod "a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c" (UID: "a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c"). InnerVolumeSpecName "kube-api-access-k678p". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:28:22 crc kubenswrapper[4755]: I1124 01:28:22.350018 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c" (UID: "a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:28:22 crc kubenswrapper[4755]: I1124 01:28:22.352590 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c" (UID: "a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:28:22 crc kubenswrapper[4755]: I1124 01:28:22.353805 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c" (UID: "a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:28:22 crc kubenswrapper[4755]: I1124 01:28:22.354840 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c-config" (OuterVolumeSpecName: "config") pod "a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c" (UID: "a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:28:22 crc kubenswrapper[4755]: I1124 01:28:22.414758 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k678p\" (UniqueName: \"kubernetes.io/projected/a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c-kube-api-access-k678p\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:22 crc kubenswrapper[4755]: I1124 01:28:22.414805 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:22 crc kubenswrapper[4755]: I1124 01:28:22.414817 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:22 crc kubenswrapper[4755]: I1124 01:28:22.414824 4755 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:22 crc kubenswrapper[4755]: I1124 01:28:22.414835 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:22 crc kubenswrapper[4755]: I1124 01:28:22.662048 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-7gzpp" event={"ID":"a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c","Type":"ContainerDied","Data":"0883fc84ad362db9d3f46534edd882003896d9e10aa8237bd7c9605438da3f33"} Nov 24 01:28:22 crc kubenswrapper[4755]: I1124 01:28:22.662125 4755 scope.go:117] "RemoveContainer" 
containerID="23452913ad37758af62107f2d6c27c168db80a2d9d06a76d76fc10c30136ab8b" Nov 24 01:28:22 crc kubenswrapper[4755]: I1124 01:28:22.662297 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-7gzpp" Nov 24 01:28:22 crc kubenswrapper[4755]: I1124 01:28:22.675724 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-fhhlw" event={"ID":"ba05a62d-df27-4947-8b1a-bf6410f576a9","Type":"ContainerStarted","Data":"0a3bad00098562ade0fcf5771bbbdb5b5bf9f5b043ae4aa0486f7624ecaae205"} Nov 24 01:28:22 crc kubenswrapper[4755]: I1124 01:28:22.687071 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-vtf8f" event={"ID":"6dee4f3a-cb77-4137-b459-9c1be1a005ef","Type":"ContainerDied","Data":"e3ff0415a1a18664bea73793ba706e1cb37d56a1ab2c46c8896bb6405dc0d911"} Nov 24 01:28:22 crc kubenswrapper[4755]: I1124 01:28:22.687129 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e3ff0415a1a18664bea73793ba706e1cb37d56a1ab2c46c8896bb6405dc0d911" Nov 24 01:28:22 crc kubenswrapper[4755]: I1124 01:28:22.687380 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-vtf8f" Nov 24 01:28:22 crc kubenswrapper[4755]: I1124 01:28:22.709874 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-fhhlw" podStartSLOduration=2.414117863 podStartE2EDuration="7.709849636s" podCreationTimestamp="2025-11-24 01:28:15 +0000 UTC" firstStartedPulling="2025-11-24 01:28:16.680902281 +0000 UTC m=+921.366967782" lastFinishedPulling="2025-11-24 01:28:21.976634054 +0000 UTC m=+926.662699555" observedRunningTime="2025-11-24 01:28:22.694758341 +0000 UTC m=+927.380823842" watchObservedRunningTime="2025-11-24 01:28:22.709849636 +0000 UTC m=+927.395915157" Nov 24 01:28:22 crc kubenswrapper[4755]: I1124 01:28:22.723279 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-7gzpp"] Nov 24 01:28:22 crc kubenswrapper[4755]: I1124 01:28:22.730962 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-698758b865-7gzpp"] Nov 24 01:28:22 crc kubenswrapper[4755]: I1124 01:28:22.732167 4755 scope.go:117] "RemoveContainer" containerID="dd9f1e7ccce2b941c3befc1271f0411bcec3895fbac71fd0d49979c25d0a90fb" Nov 24 01:28:23 crc kubenswrapper[4755]: I1124 01:28:23.021044 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-d6jwf"] Nov 24 01:28:23 crc kubenswrapper[4755]: E1124 01:28:23.021479 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c" containerName="dnsmasq-dns" Nov 24 01:28:23 crc kubenswrapper[4755]: I1124 01:28:23.021502 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c" containerName="dnsmasq-dns" Nov 24 01:28:23 crc kubenswrapper[4755]: E1124 01:28:23.021533 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6dee4f3a-cb77-4137-b459-9c1be1a005ef" containerName="glance-db-sync" Nov 24 01:28:23 crc kubenswrapper[4755]: I1124 01:28:23.021540 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="6dee4f3a-cb77-4137-b459-9c1be1a005ef" containerName="glance-db-sync" Nov 24 01:28:23 crc kubenswrapper[4755]: E1124 01:28:23.021550 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="281e34b7-f570-479f-ac9d-0e2b98d3f24f" 
containerName="mariadb-account-create" Nov 24 01:28:23 crc kubenswrapper[4755]: I1124 01:28:23.021558 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="281e34b7-f570-479f-ac9d-0e2b98d3f24f" containerName="mariadb-account-create" Nov 24 01:28:23 crc kubenswrapper[4755]: E1124 01:28:23.021570 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="866eeb81-622c-4ea2-a727-184e5e7d745c" containerName="mariadb-account-create" Nov 24 01:28:23 crc kubenswrapper[4755]: I1124 01:28:23.021578 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="866eeb81-622c-4ea2-a727-184e5e7d745c" containerName="mariadb-account-create" Nov 24 01:28:23 crc kubenswrapper[4755]: E1124 01:28:23.021589 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3ad27b1-9595-4f03-b472-2dfdf7e70cc8" containerName="mariadb-account-create" Nov 24 01:28:23 crc kubenswrapper[4755]: I1124 01:28:23.021596 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3ad27b1-9595-4f03-b472-2dfdf7e70cc8" containerName="mariadb-account-create" Nov 24 01:28:23 crc kubenswrapper[4755]: E1124 01:28:23.021626 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8b666dc-55f4-4e9f-9768-147815005e1e" containerName="mariadb-database-create" Nov 24 01:28:23 crc kubenswrapper[4755]: I1124 01:28:23.021634 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8b666dc-55f4-4e9f-9768-147815005e1e" containerName="mariadb-database-create" Nov 24 01:28:23 crc kubenswrapper[4755]: E1124 01:28:23.021656 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1b6e064-c1f0-4efd-9a54-5d1f7a4708b6" containerName="mariadb-database-create" Nov 24 01:28:23 crc kubenswrapper[4755]: I1124 01:28:23.021664 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1b6e064-c1f0-4efd-9a54-5d1f7a4708b6" containerName="mariadb-database-create" Nov 24 01:28:23 crc kubenswrapper[4755]: E1124 01:28:23.021677 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee2730ba-9a5e-48eb-be37-0cc250092218" containerName="ovn-config" Nov 24 01:28:23 crc kubenswrapper[4755]: I1124 01:28:23.021684 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee2730ba-9a5e-48eb-be37-0cc250092218" containerName="ovn-config" Nov 24 01:28:23 crc kubenswrapper[4755]: E1124 01:28:23.021695 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c85f6518-6046-441c-afae-4bd797cb807f" containerName="mariadb-database-create" Nov 24 01:28:23 crc kubenswrapper[4755]: I1124 01:28:23.021701 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="c85f6518-6046-441c-afae-4bd797cb807f" containerName="mariadb-database-create" Nov 24 01:28:23 crc kubenswrapper[4755]: E1124 01:28:23.021708 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c" containerName="init" Nov 24 01:28:23 crc kubenswrapper[4755]: I1124 01:28:23.021716 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c" containerName="init" Nov 24 01:28:23 crc kubenswrapper[4755]: I1124 01:28:23.021910 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c" containerName="dnsmasq-dns" Nov 24 01:28:23 crc kubenswrapper[4755]: I1124 01:28:23.021927 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="c85f6518-6046-441c-afae-4bd797cb807f" containerName="mariadb-database-create" Nov 24 01:28:23 crc kubenswrapper[4755]: I1124 01:28:23.021945 4755 
memory_manager.go:354] "RemoveStaleState removing state" podUID="ee2730ba-9a5e-48eb-be37-0cc250092218" containerName="ovn-config" Nov 24 01:28:23 crc kubenswrapper[4755]: I1124 01:28:23.021959 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1b6e064-c1f0-4efd-9a54-5d1f7a4708b6" containerName="mariadb-database-create" Nov 24 01:28:23 crc kubenswrapper[4755]: I1124 01:28:23.021969 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="6dee4f3a-cb77-4137-b459-9c1be1a005ef" containerName="glance-db-sync" Nov 24 01:28:23 crc kubenswrapper[4755]: I1124 01:28:23.021984 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8b666dc-55f4-4e9f-9768-147815005e1e" containerName="mariadb-database-create" Nov 24 01:28:23 crc kubenswrapper[4755]: I1124 01:28:23.021996 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="281e34b7-f570-479f-ac9d-0e2b98d3f24f" containerName="mariadb-account-create" Nov 24 01:28:23 crc kubenswrapper[4755]: I1124 01:28:23.022005 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="c3ad27b1-9595-4f03-b472-2dfdf7e70cc8" containerName="mariadb-account-create" Nov 24 01:28:23 crc kubenswrapper[4755]: I1124 01:28:23.022013 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="866eeb81-622c-4ea2-a727-184e5e7d745c" containerName="mariadb-account-create" Nov 24 01:28:23 crc kubenswrapper[4755]: I1124 01:28:23.025030 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7ff5475cc9-d6jwf" Nov 24 01:28:23 crc kubenswrapper[4755]: I1124 01:28:23.034503 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-d6jwf"] Nov 24 01:28:23 crc kubenswrapper[4755]: I1124 01:28:23.137364 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/84a99ebb-2bb7-4562-87b2-e1f5467b51dc-config\") pod \"dnsmasq-dns-7ff5475cc9-d6jwf\" (UID: \"84a99ebb-2bb7-4562-87b2-e1f5467b51dc\") " pod="openstack/dnsmasq-dns-7ff5475cc9-d6jwf" Nov 24 01:28:23 crc kubenswrapper[4755]: I1124 01:28:23.137719 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/84a99ebb-2bb7-4562-87b2-e1f5467b51dc-ovsdbserver-nb\") pod \"dnsmasq-dns-7ff5475cc9-d6jwf\" (UID: \"84a99ebb-2bb7-4562-87b2-e1f5467b51dc\") " pod="openstack/dnsmasq-dns-7ff5475cc9-d6jwf" Nov 24 01:28:23 crc kubenswrapper[4755]: I1124 01:28:23.137756 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/84a99ebb-2bb7-4562-87b2-e1f5467b51dc-dns-swift-storage-0\") pod \"dnsmasq-dns-7ff5475cc9-d6jwf\" (UID: \"84a99ebb-2bb7-4562-87b2-e1f5467b51dc\") " pod="openstack/dnsmasq-dns-7ff5475cc9-d6jwf" Nov 24 01:28:23 crc kubenswrapper[4755]: I1124 01:28:23.137786 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/84a99ebb-2bb7-4562-87b2-e1f5467b51dc-ovsdbserver-sb\") pod \"dnsmasq-dns-7ff5475cc9-d6jwf\" (UID: \"84a99ebb-2bb7-4562-87b2-e1f5467b51dc\") " pod="openstack/dnsmasq-dns-7ff5475cc9-d6jwf" Nov 24 01:28:23 crc kubenswrapper[4755]: I1124 01:28:23.137885 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmtzg\" 
(UniqueName: \"kubernetes.io/projected/84a99ebb-2bb7-4562-87b2-e1f5467b51dc-kube-api-access-nmtzg\") pod \"dnsmasq-dns-7ff5475cc9-d6jwf\" (UID: \"84a99ebb-2bb7-4562-87b2-e1f5467b51dc\") " pod="openstack/dnsmasq-dns-7ff5475cc9-d6jwf" Nov 24 01:28:23 crc kubenswrapper[4755]: I1124 01:28:23.137943 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/84a99ebb-2bb7-4562-87b2-e1f5467b51dc-dns-svc\") pod \"dnsmasq-dns-7ff5475cc9-d6jwf\" (UID: \"84a99ebb-2bb7-4562-87b2-e1f5467b51dc\") " pod="openstack/dnsmasq-dns-7ff5475cc9-d6jwf" Nov 24 01:28:23 crc kubenswrapper[4755]: I1124 01:28:23.239223 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/84a99ebb-2bb7-4562-87b2-e1f5467b51dc-config\") pod \"dnsmasq-dns-7ff5475cc9-d6jwf\" (UID: \"84a99ebb-2bb7-4562-87b2-e1f5467b51dc\") " pod="openstack/dnsmasq-dns-7ff5475cc9-d6jwf" Nov 24 01:28:23 crc kubenswrapper[4755]: I1124 01:28:23.239289 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/84a99ebb-2bb7-4562-87b2-e1f5467b51dc-ovsdbserver-nb\") pod \"dnsmasq-dns-7ff5475cc9-d6jwf\" (UID: \"84a99ebb-2bb7-4562-87b2-e1f5467b51dc\") " pod="openstack/dnsmasq-dns-7ff5475cc9-d6jwf" Nov 24 01:28:23 crc kubenswrapper[4755]: I1124 01:28:23.239334 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/84a99ebb-2bb7-4562-87b2-e1f5467b51dc-dns-swift-storage-0\") pod \"dnsmasq-dns-7ff5475cc9-d6jwf\" (UID: \"84a99ebb-2bb7-4562-87b2-e1f5467b51dc\") " pod="openstack/dnsmasq-dns-7ff5475cc9-d6jwf" Nov 24 01:28:23 crc kubenswrapper[4755]: I1124 01:28:23.239360 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/84a99ebb-2bb7-4562-87b2-e1f5467b51dc-ovsdbserver-sb\") pod \"dnsmasq-dns-7ff5475cc9-d6jwf\" (UID: \"84a99ebb-2bb7-4562-87b2-e1f5467b51dc\") " pod="openstack/dnsmasq-dns-7ff5475cc9-d6jwf" Nov 24 01:28:23 crc kubenswrapper[4755]: I1124 01:28:23.239399 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmtzg\" (UniqueName: \"kubernetes.io/projected/84a99ebb-2bb7-4562-87b2-e1f5467b51dc-kube-api-access-nmtzg\") pod \"dnsmasq-dns-7ff5475cc9-d6jwf\" (UID: \"84a99ebb-2bb7-4562-87b2-e1f5467b51dc\") " pod="openstack/dnsmasq-dns-7ff5475cc9-d6jwf" Nov 24 01:28:23 crc kubenswrapper[4755]: I1124 01:28:23.239436 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/84a99ebb-2bb7-4562-87b2-e1f5467b51dc-dns-svc\") pod \"dnsmasq-dns-7ff5475cc9-d6jwf\" (UID: \"84a99ebb-2bb7-4562-87b2-e1f5467b51dc\") " pod="openstack/dnsmasq-dns-7ff5475cc9-d6jwf" Nov 24 01:28:23 crc kubenswrapper[4755]: I1124 01:28:23.240186 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/84a99ebb-2bb7-4562-87b2-e1f5467b51dc-config\") pod \"dnsmasq-dns-7ff5475cc9-d6jwf\" (UID: \"84a99ebb-2bb7-4562-87b2-e1f5467b51dc\") " pod="openstack/dnsmasq-dns-7ff5475cc9-d6jwf" Nov 24 01:28:23 crc kubenswrapper[4755]: I1124 01:28:23.240243 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/84a99ebb-2bb7-4562-87b2-e1f5467b51dc-ovsdbserver-nb\") pod \"dnsmasq-dns-7ff5475cc9-d6jwf\" (UID: \"84a99ebb-2bb7-4562-87b2-e1f5467b51dc\") " pod="openstack/dnsmasq-dns-7ff5475cc9-d6jwf" Nov 24 01:28:23 crc kubenswrapper[4755]: I1124 01:28:23.240804 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/84a99ebb-2bb7-4562-87b2-e1f5467b51dc-dns-svc\") pod \"dnsmasq-dns-7ff5475cc9-d6jwf\" (UID: \"84a99ebb-2bb7-4562-87b2-e1f5467b51dc\") " pod="openstack/dnsmasq-dns-7ff5475cc9-d6jwf" Nov 24 01:28:23 crc kubenswrapper[4755]: I1124 01:28:23.240823 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/84a99ebb-2bb7-4562-87b2-e1f5467b51dc-dns-swift-storage-0\") pod \"dnsmasq-dns-7ff5475cc9-d6jwf\" (UID: \"84a99ebb-2bb7-4562-87b2-e1f5467b51dc\") " pod="openstack/dnsmasq-dns-7ff5475cc9-d6jwf" Nov 24 01:28:23 crc kubenswrapper[4755]: I1124 01:28:23.240998 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/84a99ebb-2bb7-4562-87b2-e1f5467b51dc-ovsdbserver-sb\") pod \"dnsmasq-dns-7ff5475cc9-d6jwf\" (UID: \"84a99ebb-2bb7-4562-87b2-e1f5467b51dc\") " pod="openstack/dnsmasq-dns-7ff5475cc9-d6jwf" Nov 24 01:28:23 crc kubenswrapper[4755]: I1124 01:28:23.262722 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmtzg\" (UniqueName: \"kubernetes.io/projected/84a99ebb-2bb7-4562-87b2-e1f5467b51dc-kube-api-access-nmtzg\") pod \"dnsmasq-dns-7ff5475cc9-d6jwf\" (UID: \"84a99ebb-2bb7-4562-87b2-e1f5467b51dc\") " pod="openstack/dnsmasq-dns-7ff5475cc9-d6jwf" Nov 24 01:28:23 crc kubenswrapper[4755]: I1124 01:28:23.341439 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7ff5475cc9-d6jwf" Nov 24 01:28:23 crc kubenswrapper[4755]: I1124 01:28:23.878324 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-d6jwf"] Nov 24 01:28:23 crc kubenswrapper[4755]: W1124 01:28:23.879513 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod84a99ebb_2bb7_4562_87b2_e1f5467b51dc.slice/crio-fb35c7627518d65c7425c768f1f4345dc09c2d41ad08cb49daf4fb5fe2052780 WatchSource:0}: Error finding container fb35c7627518d65c7425c768f1f4345dc09c2d41ad08cb49daf4fb5fe2052780: Status 404 returned error can't find the container with id fb35c7627518d65c7425c768f1f4345dc09c2d41ad08cb49daf4fb5fe2052780 Nov 24 01:28:24 crc kubenswrapper[4755]: I1124 01:28:24.023570 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c" path="/var/lib/kubelet/pods/a4c9f27e-2ddf-443d-aa39-cbd02f43fc5c/volumes" Nov 24 01:28:24 crc kubenswrapper[4755]: I1124 01:28:24.706222 4755 generic.go:334] "Generic (PLEG): container finished" podID="84a99ebb-2bb7-4562-87b2-e1f5467b51dc" containerID="0d040e626ea303c02f254781de8453c7eae5f73f34d194ae8a6ec720f2ba12ca" exitCode=0 Nov 24 01:28:24 crc kubenswrapper[4755]: I1124 01:28:24.706278 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7ff5475cc9-d6jwf" event={"ID":"84a99ebb-2bb7-4562-87b2-e1f5467b51dc","Type":"ContainerDied","Data":"0d040e626ea303c02f254781de8453c7eae5f73f34d194ae8a6ec720f2ba12ca"} Nov 24 01:28:24 crc kubenswrapper[4755]: I1124 01:28:24.706483 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7ff5475cc9-d6jwf" event={"ID":"84a99ebb-2bb7-4562-87b2-e1f5467b51dc","Type":"ContainerStarted","Data":"fb35c7627518d65c7425c768f1f4345dc09c2d41ad08cb49daf4fb5fe2052780"} Nov 24 01:28:25 crc kubenswrapper[4755]: I1124 01:28:25.716407 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7ff5475cc9-d6jwf" event={"ID":"84a99ebb-2bb7-4562-87b2-e1f5467b51dc","Type":"ContainerStarted","Data":"80914558c2a9e76df2be248eb3a6db4af0431c26148e19a11a6bab7c7b6472df"} Nov 24 01:28:25 crc kubenswrapper[4755]: I1124 01:28:25.716771 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7ff5475cc9-d6jwf" Nov 24 01:28:25 crc kubenswrapper[4755]: I1124 01:28:25.717932 4755 generic.go:334] "Generic (PLEG): container finished" podID="ba05a62d-df27-4947-8b1a-bf6410f576a9" containerID="0a3bad00098562ade0fcf5771bbbdb5b5bf9f5b043ae4aa0486f7624ecaae205" exitCode=0 Nov 24 01:28:25 crc kubenswrapper[4755]: I1124 01:28:25.717965 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-fhhlw" event={"ID":"ba05a62d-df27-4947-8b1a-bf6410f576a9","Type":"ContainerDied","Data":"0a3bad00098562ade0fcf5771bbbdb5b5bf9f5b043ae4aa0486f7624ecaae205"} Nov 24 01:28:25 crc kubenswrapper[4755]: I1124 01:28:25.766408 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7ff5475cc9-d6jwf" podStartSLOduration=3.766385022 podStartE2EDuration="3.766385022s" podCreationTimestamp="2025-11-24 01:28:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:28:25.741247025 +0000 UTC m=+930.427312526" watchObservedRunningTime="2025-11-24 01:28:25.766385022 +0000 UTC m=+930.452450543" Nov 24 01:28:27 crc 
kubenswrapper[4755]: I1124 01:28:27.106287 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-fhhlw" Nov 24 01:28:27 crc kubenswrapper[4755]: I1124 01:28:27.206516 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba05a62d-df27-4947-8b1a-bf6410f576a9-config-data\") pod \"ba05a62d-df27-4947-8b1a-bf6410f576a9\" (UID: \"ba05a62d-df27-4947-8b1a-bf6410f576a9\") " Nov 24 01:28:27 crc kubenswrapper[4755]: I1124 01:28:27.206645 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzfr2\" (UniqueName: \"kubernetes.io/projected/ba05a62d-df27-4947-8b1a-bf6410f576a9-kube-api-access-lzfr2\") pod \"ba05a62d-df27-4947-8b1a-bf6410f576a9\" (UID: \"ba05a62d-df27-4947-8b1a-bf6410f576a9\") " Nov 24 01:28:27 crc kubenswrapper[4755]: I1124 01:28:27.206701 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba05a62d-df27-4947-8b1a-bf6410f576a9-combined-ca-bundle\") pod \"ba05a62d-df27-4947-8b1a-bf6410f576a9\" (UID: \"ba05a62d-df27-4947-8b1a-bf6410f576a9\") " Nov 24 01:28:27 crc kubenswrapper[4755]: I1124 01:28:27.213870 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba05a62d-df27-4947-8b1a-bf6410f576a9-kube-api-access-lzfr2" (OuterVolumeSpecName: "kube-api-access-lzfr2") pod "ba05a62d-df27-4947-8b1a-bf6410f576a9" (UID: "ba05a62d-df27-4947-8b1a-bf6410f576a9"). InnerVolumeSpecName "kube-api-access-lzfr2". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:28:27 crc kubenswrapper[4755]: I1124 01:28:27.236488 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba05a62d-df27-4947-8b1a-bf6410f576a9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ba05a62d-df27-4947-8b1a-bf6410f576a9" (UID: "ba05a62d-df27-4947-8b1a-bf6410f576a9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:28:27 crc kubenswrapper[4755]: I1124 01:28:27.245643 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba05a62d-df27-4947-8b1a-bf6410f576a9-config-data" (OuterVolumeSpecName: "config-data") pod "ba05a62d-df27-4947-8b1a-bf6410f576a9" (UID: "ba05a62d-df27-4947-8b1a-bf6410f576a9"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:28:27 crc kubenswrapper[4755]: I1124 01:28:27.308669 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzfr2\" (UniqueName: \"kubernetes.io/projected/ba05a62d-df27-4947-8b1a-bf6410f576a9-kube-api-access-lzfr2\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:27 crc kubenswrapper[4755]: I1124 01:28:27.308719 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba05a62d-df27-4947-8b1a-bf6410f576a9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:27 crc kubenswrapper[4755]: I1124 01:28:27.308738 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba05a62d-df27-4947-8b1a-bf6410f576a9-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:27 crc kubenswrapper[4755]: I1124 01:28:27.735336 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-fhhlw" event={"ID":"ba05a62d-df27-4947-8b1a-bf6410f576a9","Type":"ContainerDied","Data":"c67c3332a4709f0e858aea0c0ecd97a8d74be9d2dd1bdd980f4787c5f6dff7c6"} Nov 24 01:28:27 crc kubenswrapper[4755]: I1124 01:28:27.735384 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-fhhlw" Nov 24 01:28:27 crc kubenswrapper[4755]: I1124 01:28:27.735408 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c67c3332a4709f0e858aea0c0ecd97a8d74be9d2dd1bdd980f4787c5f6dff7c6" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.005964 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-d6jwf"] Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.006170 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7ff5475cc9-d6jwf" podUID="84a99ebb-2bb7-4562-87b2-e1f5467b51dc" containerName="dnsmasq-dns" containerID="cri-o://80914558c2a9e76df2be248eb3a6db4af0431c26148e19a11a6bab7c7b6472df" gracePeriod=10 Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.046856 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-c6z97"] Nov 24 01:28:28 crc kubenswrapper[4755]: E1124 01:28:28.047208 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba05a62d-df27-4947-8b1a-bf6410f576a9" containerName="keystone-db-sync" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.047226 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba05a62d-df27-4947-8b1a-bf6410f576a9" containerName="keystone-db-sync" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.047390 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba05a62d-df27-4947-8b1a-bf6410f576a9" containerName="keystone-db-sync" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.048231 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c5cc7c5ff-c6z97" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.068949 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-mfpvq"] Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.074919 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-mfpvq" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.079411 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.079968 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.080188 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.080373 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-llx85" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.086508 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-mfpvq"] Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.098158 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.120324 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-c6z97"] Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.148513 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6354cb83-933d-4b3f-b922-ce9e4f94e123-combined-ca-bundle\") pod \"keystone-bootstrap-mfpvq\" (UID: \"6354cb83-933d-4b3f-b922-ce9e4f94e123\") " pod="openstack/keystone-bootstrap-mfpvq" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.148670 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/261fa528-26f9-4f97-93ba-dd0f4a7e6260-ovsdbserver-sb\") pod \"dnsmasq-dns-5c5cc7c5ff-c6z97\" (UID: \"261fa528-26f9-4f97-93ba-dd0f4a7e6260\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-c6z97" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.148751 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6354cb83-933d-4b3f-b922-ce9e4f94e123-fernet-keys\") pod \"keystone-bootstrap-mfpvq\" (UID: \"6354cb83-933d-4b3f-b922-ce9e4f94e123\") " pod="openstack/keystone-bootstrap-mfpvq" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.148842 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hjtc6\" (UniqueName: \"kubernetes.io/projected/261fa528-26f9-4f97-93ba-dd0f4a7e6260-kube-api-access-hjtc6\") pod \"dnsmasq-dns-5c5cc7c5ff-c6z97\" (UID: \"261fa528-26f9-4f97-93ba-dd0f4a7e6260\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-c6z97" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.148922 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6354cb83-933d-4b3f-b922-ce9e4f94e123-credential-keys\") pod \"keystone-bootstrap-mfpvq\" (UID: \"6354cb83-933d-4b3f-b922-ce9e4f94e123\") " pod="openstack/keystone-bootstrap-mfpvq" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.149005 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6354cb83-933d-4b3f-b922-ce9e4f94e123-config-data\") pod \"keystone-bootstrap-mfpvq\" (UID: 
\"6354cb83-933d-4b3f-b922-ce9e4f94e123\") " pod="openstack/keystone-bootstrap-mfpvq" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.149097 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/261fa528-26f9-4f97-93ba-dd0f4a7e6260-ovsdbserver-nb\") pod \"dnsmasq-dns-5c5cc7c5ff-c6z97\" (UID: \"261fa528-26f9-4f97-93ba-dd0f4a7e6260\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-c6z97" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.149168 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fnl9m\" (UniqueName: \"kubernetes.io/projected/6354cb83-933d-4b3f-b922-ce9e4f94e123-kube-api-access-fnl9m\") pod \"keystone-bootstrap-mfpvq\" (UID: \"6354cb83-933d-4b3f-b922-ce9e4f94e123\") " pod="openstack/keystone-bootstrap-mfpvq" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.149232 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/261fa528-26f9-4f97-93ba-dd0f4a7e6260-dns-swift-storage-0\") pod \"dnsmasq-dns-5c5cc7c5ff-c6z97\" (UID: \"261fa528-26f9-4f97-93ba-dd0f4a7e6260\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-c6z97" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.149308 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/261fa528-26f9-4f97-93ba-dd0f4a7e6260-dns-svc\") pod \"dnsmasq-dns-5c5cc7c5ff-c6z97\" (UID: \"261fa528-26f9-4f97-93ba-dd0f4a7e6260\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-c6z97" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.149393 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6354cb83-933d-4b3f-b922-ce9e4f94e123-scripts\") pod \"keystone-bootstrap-mfpvq\" (UID: \"6354cb83-933d-4b3f-b922-ce9e4f94e123\") " pod="openstack/keystone-bootstrap-mfpvq" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.149470 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/261fa528-26f9-4f97-93ba-dd0f4a7e6260-config\") pod \"dnsmasq-dns-5c5cc7c5ff-c6z97\" (UID: \"261fa528-26f9-4f97-93ba-dd0f4a7e6260\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-c6z97" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.246594 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-6d767c7b5c-n5rr5"] Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.248024 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6d767c7b5c-n5rr5" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.256807 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6354cb83-933d-4b3f-b922-ce9e4f94e123-scripts\") pod \"keystone-bootstrap-mfpvq\" (UID: \"6354cb83-933d-4b3f-b922-ce9e4f94e123\") " pod="openstack/keystone-bootstrap-mfpvq" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.257128 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/261fa528-26f9-4f97-93ba-dd0f4a7e6260-config\") pod \"dnsmasq-dns-5c5cc7c5ff-c6z97\" (UID: \"261fa528-26f9-4f97-93ba-dd0f4a7e6260\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-c6z97" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.257218 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9005ec18-506f-4b7c-a06c-7d4d619e3732-logs\") pod \"horizon-6d767c7b5c-n5rr5\" (UID: \"9005ec18-506f-4b7c-a06c-7d4d619e3732\") " pod="openstack/horizon-6d767c7b5c-n5rr5" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.257311 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6354cb83-933d-4b3f-b922-ce9e4f94e123-combined-ca-bundle\") pod \"keystone-bootstrap-mfpvq\" (UID: \"6354cb83-933d-4b3f-b922-ce9e4f94e123\") " pod="openstack/keystone-bootstrap-mfpvq" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.257460 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-czm7j\" (UniqueName: \"kubernetes.io/projected/9005ec18-506f-4b7c-a06c-7d4d619e3732-kube-api-access-czm7j\") pod \"horizon-6d767c7b5c-n5rr5\" (UID: \"9005ec18-506f-4b7c-a06c-7d4d619e3732\") " pod="openstack/horizon-6d767c7b5c-n5rr5" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.257530 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9005ec18-506f-4b7c-a06c-7d4d619e3732-config-data\") pod \"horizon-6d767c7b5c-n5rr5\" (UID: \"9005ec18-506f-4b7c-a06c-7d4d619e3732\") " pod="openstack/horizon-6d767c7b5c-n5rr5" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.257579 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/261fa528-26f9-4f97-93ba-dd0f4a7e6260-ovsdbserver-sb\") pod \"dnsmasq-dns-5c5cc7c5ff-c6z97\" (UID: \"261fa528-26f9-4f97-93ba-dd0f4a7e6260\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-c6z97" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.257645 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6354cb83-933d-4b3f-b922-ce9e4f94e123-fernet-keys\") pod \"keystone-bootstrap-mfpvq\" (UID: \"6354cb83-933d-4b3f-b922-ce9e4f94e123\") " pod="openstack/keystone-bootstrap-mfpvq" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.257669 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hjtc6\" (UniqueName: \"kubernetes.io/projected/261fa528-26f9-4f97-93ba-dd0f4a7e6260-kube-api-access-hjtc6\") pod \"dnsmasq-dns-5c5cc7c5ff-c6z97\" (UID: \"261fa528-26f9-4f97-93ba-dd0f4a7e6260\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-c6z97" Nov 24 
01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.257695 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6354cb83-933d-4b3f-b922-ce9e4f94e123-credential-keys\") pod \"keystone-bootstrap-mfpvq\" (UID: \"6354cb83-933d-4b3f-b922-ce9e4f94e123\") " pod="openstack/keystone-bootstrap-mfpvq" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.257767 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6354cb83-933d-4b3f-b922-ce9e4f94e123-config-data\") pod \"keystone-bootstrap-mfpvq\" (UID: \"6354cb83-933d-4b3f-b922-ce9e4f94e123\") " pod="openstack/keystone-bootstrap-mfpvq" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.257821 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9005ec18-506f-4b7c-a06c-7d4d619e3732-scripts\") pod \"horizon-6d767c7b5c-n5rr5\" (UID: \"9005ec18-506f-4b7c-a06c-7d4d619e3732\") " pod="openstack/horizon-6d767c7b5c-n5rr5" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.257883 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9005ec18-506f-4b7c-a06c-7d4d619e3732-horizon-secret-key\") pod \"horizon-6d767c7b5c-n5rr5\" (UID: \"9005ec18-506f-4b7c-a06c-7d4d619e3732\") " pod="openstack/horizon-6d767c7b5c-n5rr5" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.257937 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/261fa528-26f9-4f97-93ba-dd0f4a7e6260-ovsdbserver-nb\") pod \"dnsmasq-dns-5c5cc7c5ff-c6z97\" (UID: \"261fa528-26f9-4f97-93ba-dd0f4a7e6260\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-c6z97" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.257976 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fnl9m\" (UniqueName: \"kubernetes.io/projected/6354cb83-933d-4b3f-b922-ce9e4f94e123-kube-api-access-fnl9m\") pod \"keystone-bootstrap-mfpvq\" (UID: \"6354cb83-933d-4b3f-b922-ce9e4f94e123\") " pod="openstack/keystone-bootstrap-mfpvq" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.258000 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/261fa528-26f9-4f97-93ba-dd0f4a7e6260-dns-swift-storage-0\") pod \"dnsmasq-dns-5c5cc7c5ff-c6z97\" (UID: \"261fa528-26f9-4f97-93ba-dd0f4a7e6260\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-c6z97" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.258033 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/261fa528-26f9-4f97-93ba-dd0f4a7e6260-dns-svc\") pod \"dnsmasq-dns-5c5cc7c5ff-c6z97\" (UID: \"261fa528-26f9-4f97-93ba-dd0f4a7e6260\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-c6z97" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.266922 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6d767c7b5c-n5rr5"] Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.257819 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-6pxlw" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.257999 4755 reflector.go:368] Caches populated for *v1.Secret from 
object-"openstack"/"horizon" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.258079 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.263304 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.271891 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6354cb83-933d-4b3f-b922-ce9e4f94e123-scripts\") pod \"keystone-bootstrap-mfpvq\" (UID: \"6354cb83-933d-4b3f-b922-ce9e4f94e123\") " pod="openstack/keystone-bootstrap-mfpvq" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.277418 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/261fa528-26f9-4f97-93ba-dd0f4a7e6260-config\") pod \"dnsmasq-dns-5c5cc7c5ff-c6z97\" (UID: \"261fa528-26f9-4f97-93ba-dd0f4a7e6260\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-c6z97" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.280462 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/261fa528-26f9-4f97-93ba-dd0f4a7e6260-ovsdbserver-sb\") pod \"dnsmasq-dns-5c5cc7c5ff-c6z97\" (UID: \"261fa528-26f9-4f97-93ba-dd0f4a7e6260\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-c6z97" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.280684 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/261fa528-26f9-4f97-93ba-dd0f4a7e6260-dns-svc\") pod \"dnsmasq-dns-5c5cc7c5ff-c6z97\" (UID: \"261fa528-26f9-4f97-93ba-dd0f4a7e6260\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-c6z97" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.281069 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/261fa528-26f9-4f97-93ba-dd0f4a7e6260-ovsdbserver-nb\") pod \"dnsmasq-dns-5c5cc7c5ff-c6z97\" (UID: \"261fa528-26f9-4f97-93ba-dd0f4a7e6260\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-c6z97" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.281512 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6354cb83-933d-4b3f-b922-ce9e4f94e123-combined-ca-bundle\") pod \"keystone-bootstrap-mfpvq\" (UID: \"6354cb83-933d-4b3f-b922-ce9e4f94e123\") " pod="openstack/keystone-bootstrap-mfpvq" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.281828 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/261fa528-26f9-4f97-93ba-dd0f4a7e6260-dns-swift-storage-0\") pod \"dnsmasq-dns-5c5cc7c5ff-c6z97\" (UID: \"261fa528-26f9-4f97-93ba-dd0f4a7e6260\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-c6z97" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.297838 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6354cb83-933d-4b3f-b922-ce9e4f94e123-fernet-keys\") pod \"keystone-bootstrap-mfpvq\" (UID: \"6354cb83-933d-4b3f-b922-ce9e4f94e123\") " pod="openstack/keystone-bootstrap-mfpvq" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.298479 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/6354cb83-933d-4b3f-b922-ce9e4f94e123-config-data\") pod \"keystone-bootstrap-mfpvq\" (UID: \"6354cb83-933d-4b3f-b922-ce9e4f94e123\") " pod="openstack/keystone-bootstrap-mfpvq" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.300048 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6354cb83-933d-4b3f-b922-ce9e4f94e123-credential-keys\") pod \"keystone-bootstrap-mfpvq\" (UID: \"6354cb83-933d-4b3f-b922-ce9e4f94e123\") " pod="openstack/keystone-bootstrap-mfpvq" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.312032 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fnl9m\" (UniqueName: \"kubernetes.io/projected/6354cb83-933d-4b3f-b922-ce9e4f94e123-kube-api-access-fnl9m\") pod \"keystone-bootstrap-mfpvq\" (UID: \"6354cb83-933d-4b3f-b922-ce9e4f94e123\") " pod="openstack/keystone-bootstrap-mfpvq" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.313461 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hjtc6\" (UniqueName: \"kubernetes.io/projected/261fa528-26f9-4f97-93ba-dd0f4a7e6260-kube-api-access-hjtc6\") pod \"dnsmasq-dns-5c5cc7c5ff-c6z97\" (UID: \"261fa528-26f9-4f97-93ba-dd0f4a7e6260\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-c6z97" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.351234 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-rhwbp"] Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.352339 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-rhwbp" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.360101 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-4jcwk" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.360441 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.361953 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aea62103-9b85-495d-bb71-3c69c02a3000-scripts\") pod \"cinder-db-sync-rhwbp\" (UID: \"aea62103-9b85-495d-bb71-3c69c02a3000\") " pod="openstack/cinder-db-sync-rhwbp" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.362011 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pdb5j\" (UniqueName: \"kubernetes.io/projected/aea62103-9b85-495d-bb71-3c69c02a3000-kube-api-access-pdb5j\") pod \"cinder-db-sync-rhwbp\" (UID: \"aea62103-9b85-495d-bb71-3c69c02a3000\") " pod="openstack/cinder-db-sync-rhwbp" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.362047 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9005ec18-506f-4b7c-a06c-7d4d619e3732-scripts\") pod \"horizon-6d767c7b5c-n5rr5\" (UID: \"9005ec18-506f-4b7c-a06c-7d4d619e3732\") " pod="openstack/horizon-6d767c7b5c-n5rr5" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.362084 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9005ec18-506f-4b7c-a06c-7d4d619e3732-horizon-secret-key\") pod \"horizon-6d767c7b5c-n5rr5\" (UID: \"9005ec18-506f-4b7c-a06c-7d4d619e3732\") " pod="openstack/horizon-6d767c7b5c-n5rr5" 
Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.362114 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/aea62103-9b85-495d-bb71-3c69c02a3000-db-sync-config-data\") pod \"cinder-db-sync-rhwbp\" (UID: \"aea62103-9b85-495d-bb71-3c69c02a3000\") " pod="openstack/cinder-db-sync-rhwbp" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.362149 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aea62103-9b85-495d-bb71-3c69c02a3000-config-data\") pod \"cinder-db-sync-rhwbp\" (UID: \"aea62103-9b85-495d-bb71-3c69c02a3000\") " pod="openstack/cinder-db-sync-rhwbp" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.362203 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9005ec18-506f-4b7c-a06c-7d4d619e3732-logs\") pod \"horizon-6d767c7b5c-n5rr5\" (UID: \"9005ec18-506f-4b7c-a06c-7d4d619e3732\") " pod="openstack/horizon-6d767c7b5c-n5rr5" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.362247 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-czm7j\" (UniqueName: \"kubernetes.io/projected/9005ec18-506f-4b7c-a06c-7d4d619e3732-kube-api-access-czm7j\") pod \"horizon-6d767c7b5c-n5rr5\" (UID: \"9005ec18-506f-4b7c-a06c-7d4d619e3732\") " pod="openstack/horizon-6d767c7b5c-n5rr5" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.362271 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aea62103-9b85-495d-bb71-3c69c02a3000-combined-ca-bundle\") pod \"cinder-db-sync-rhwbp\" (UID: \"aea62103-9b85-495d-bb71-3c69c02a3000\") " pod="openstack/cinder-db-sync-rhwbp" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.362298 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9005ec18-506f-4b7c-a06c-7d4d619e3732-config-data\") pod \"horizon-6d767c7b5c-n5rr5\" (UID: \"9005ec18-506f-4b7c-a06c-7d4d619e3732\") " pod="openstack/horizon-6d767c7b5c-n5rr5" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.362347 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/aea62103-9b85-495d-bb71-3c69c02a3000-etc-machine-id\") pod \"cinder-db-sync-rhwbp\" (UID: \"aea62103-9b85-495d-bb71-3c69c02a3000\") " pod="openstack/cinder-db-sync-rhwbp" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.364569 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9005ec18-506f-4b7c-a06c-7d4d619e3732-scripts\") pod \"horizon-6d767c7b5c-n5rr5\" (UID: \"9005ec18-506f-4b7c-a06c-7d4d619e3732\") " pod="openstack/horizon-6d767c7b5c-n5rr5" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.364935 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9005ec18-506f-4b7c-a06c-7d4d619e3732-logs\") pod \"horizon-6d767c7b5c-n5rr5\" (UID: \"9005ec18-506f-4b7c-a06c-7d4d619e3732\") " pod="openstack/horizon-6d767c7b5c-n5rr5" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.365957 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config-data\" (UniqueName: \"kubernetes.io/configmap/9005ec18-506f-4b7c-a06c-7d4d619e3732-config-data\") pod \"horizon-6d767c7b5c-n5rr5\" (UID: \"9005ec18-506f-4b7c-a06c-7d4d619e3732\") " pod="openstack/horizon-6d767c7b5c-n5rr5" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.368214 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.372052 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9005ec18-506f-4b7c-a06c-7d4d619e3732-horizon-secret-key\") pod \"horizon-6d767c7b5c-n5rr5\" (UID: \"9005ec18-506f-4b7c-a06c-7d4d619e3732\") " pod="openstack/horizon-6d767c7b5c-n5rr5" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.373671 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-rhwbp"] Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.410783 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c5cc7c5ff-c6z97" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.411171 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-65g8w"] Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.412161 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-65g8w" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.416279 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.416809 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-2kjxr" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.416855 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.420172 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-mfpvq" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.427888 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-czm7j\" (UniqueName: \"kubernetes.io/projected/9005ec18-506f-4b7c-a06c-7d4d619e3732-kube-api-access-czm7j\") pod \"horizon-6d767c7b5c-n5rr5\" (UID: \"9005ec18-506f-4b7c-a06c-7d4d619e3732\") " pod="openstack/horizon-6d767c7b5c-n5rr5" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.453736 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.455992 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.460130 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.461785 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.463715 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7e96f37-574f-4900-88f9-33dc41179807-run-httpd\") pod \"ceilometer-0\" (UID: \"a7e96f37-574f-4900-88f9-33dc41179807\") " pod="openstack/ceilometer-0" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.463781 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7e96f37-574f-4900-88f9-33dc41179807-log-httpd\") pod \"ceilometer-0\" (UID: \"a7e96f37-574f-4900-88f9-33dc41179807\") " pod="openstack/ceilometer-0" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.463811 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/aea62103-9b85-495d-bb71-3c69c02a3000-db-sync-config-data\") pod \"cinder-db-sync-rhwbp\" (UID: \"aea62103-9b85-495d-bb71-3c69c02a3000\") " pod="openstack/cinder-db-sync-rhwbp" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.463841 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aea62103-9b85-495d-bb71-3c69c02a3000-config-data\") pod \"cinder-db-sync-rhwbp\" (UID: \"aea62103-9b85-495d-bb71-3c69c02a3000\") " pod="openstack/cinder-db-sync-rhwbp" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.463879 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g2srq\" (UniqueName: \"kubernetes.io/projected/a7e96f37-574f-4900-88f9-33dc41179807-kube-api-access-g2srq\") pod \"ceilometer-0\" (UID: \"a7e96f37-574f-4900-88f9-33dc41179807\") " pod="openstack/ceilometer-0" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.463913 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a7e96f37-574f-4900-88f9-33dc41179807-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a7e96f37-574f-4900-88f9-33dc41179807\") " pod="openstack/ceilometer-0" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.463947 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7e96f37-574f-4900-88f9-33dc41179807-config-data\") pod \"ceilometer-0\" (UID: \"a7e96f37-574f-4900-88f9-33dc41179807\") " pod="openstack/ceilometer-0" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.463969 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f90a48fa-6911-4df9-a1e8-d64ba7547daf-config\") pod \"neutron-db-sync-65g8w\" (UID: \"f90a48fa-6911-4df9-a1e8-d64ba7547daf\") " pod="openstack/neutron-db-sync-65g8w" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.464015 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-7m7tp\" (UniqueName: \"kubernetes.io/projected/f90a48fa-6911-4df9-a1e8-d64ba7547daf-kube-api-access-7m7tp\") pod \"neutron-db-sync-65g8w\" (UID: \"f90a48fa-6911-4df9-a1e8-d64ba7547daf\") " pod="openstack/neutron-db-sync-65g8w" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.464038 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f90a48fa-6911-4df9-a1e8-d64ba7547daf-combined-ca-bundle\") pod \"neutron-db-sync-65g8w\" (UID: \"f90a48fa-6911-4df9-a1e8-d64ba7547daf\") " pod="openstack/neutron-db-sync-65g8w" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.464074 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aea62103-9b85-495d-bb71-3c69c02a3000-combined-ca-bundle\") pod \"cinder-db-sync-rhwbp\" (UID: \"aea62103-9b85-495d-bb71-3c69c02a3000\") " pod="openstack/cinder-db-sync-rhwbp" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.464117 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7e96f37-574f-4900-88f9-33dc41179807-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a7e96f37-574f-4900-88f9-33dc41179807\") " pod="openstack/ceilometer-0" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.464158 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/aea62103-9b85-495d-bb71-3c69c02a3000-etc-machine-id\") pod \"cinder-db-sync-rhwbp\" (UID: \"aea62103-9b85-495d-bb71-3c69c02a3000\") " pod="openstack/cinder-db-sync-rhwbp" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.464182 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aea62103-9b85-495d-bb71-3c69c02a3000-scripts\") pod \"cinder-db-sync-rhwbp\" (UID: \"aea62103-9b85-495d-bb71-3c69c02a3000\") " pod="openstack/cinder-db-sync-rhwbp" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.464204 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7e96f37-574f-4900-88f9-33dc41179807-scripts\") pod \"ceilometer-0\" (UID: \"a7e96f37-574f-4900-88f9-33dc41179807\") " pod="openstack/ceilometer-0" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.464224 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pdb5j\" (UniqueName: \"kubernetes.io/projected/aea62103-9b85-495d-bb71-3c69c02a3000-kube-api-access-pdb5j\") pod \"cinder-db-sync-rhwbp\" (UID: \"aea62103-9b85-495d-bb71-3c69c02a3000\") " pod="openstack/cinder-db-sync-rhwbp" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.478882 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/aea62103-9b85-495d-bb71-3c69c02a3000-etc-machine-id\") pod \"cinder-db-sync-rhwbp\" (UID: \"aea62103-9b85-495d-bb71-3c69c02a3000\") " pod="openstack/cinder-db-sync-rhwbp" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.489127 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aea62103-9b85-495d-bb71-3c69c02a3000-config-data\") pod \"cinder-db-sync-rhwbp\" (UID: 
\"aea62103-9b85-495d-bb71-3c69c02a3000\") " pod="openstack/cinder-db-sync-rhwbp" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.492144 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aea62103-9b85-495d-bb71-3c69c02a3000-combined-ca-bundle\") pod \"cinder-db-sync-rhwbp\" (UID: \"aea62103-9b85-495d-bb71-3c69c02a3000\") " pod="openstack/cinder-db-sync-rhwbp" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.492200 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-65g8w"] Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.505157 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/aea62103-9b85-495d-bb71-3c69c02a3000-db-sync-config-data\") pod \"cinder-db-sync-rhwbp\" (UID: \"aea62103-9b85-495d-bb71-3c69c02a3000\") " pod="openstack/cinder-db-sync-rhwbp" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.506181 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aea62103-9b85-495d-bb71-3c69c02a3000-scripts\") pod \"cinder-db-sync-rhwbp\" (UID: \"aea62103-9b85-495d-bb71-3c69c02a3000\") " pod="openstack/cinder-db-sync-rhwbp" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.507274 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pdb5j\" (UniqueName: \"kubernetes.io/projected/aea62103-9b85-495d-bb71-3c69c02a3000-kube-api-access-pdb5j\") pod \"cinder-db-sync-rhwbp\" (UID: \"aea62103-9b85-495d-bb71-3c69c02a3000\") " pod="openstack/cinder-db-sync-rhwbp" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.514709 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-tgz4r"] Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.521912 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-tgz4r" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.526643 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.536943 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.537197 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-j78z7" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.537325 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-c6z97"] Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.566314 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7e96f37-574f-4900-88f9-33dc41179807-log-httpd\") pod \"ceilometer-0\" (UID: \"a7e96f37-574f-4900-88f9-33dc41179807\") " pod="openstack/ceilometer-0" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.566710 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g2srq\" (UniqueName: \"kubernetes.io/projected/a7e96f37-574f-4900-88f9-33dc41179807-kube-api-access-g2srq\") pod \"ceilometer-0\" (UID: \"a7e96f37-574f-4900-88f9-33dc41179807\") " pod="openstack/ceilometer-0" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.566744 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a7e96f37-574f-4900-88f9-33dc41179807-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a7e96f37-574f-4900-88f9-33dc41179807\") " pod="openstack/ceilometer-0" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.567155 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7e96f37-574f-4900-88f9-33dc41179807-log-httpd\") pod \"ceilometer-0\" (UID: \"a7e96f37-574f-4900-88f9-33dc41179807\") " pod="openstack/ceilometer-0" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.567983 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7e96f37-574f-4900-88f9-33dc41179807-config-data\") pod \"ceilometer-0\" (UID: \"a7e96f37-574f-4900-88f9-33dc41179807\") " pod="openstack/ceilometer-0" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.568018 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f90a48fa-6911-4df9-a1e8-d64ba7547daf-config\") pod \"neutron-db-sync-65g8w\" (UID: \"f90a48fa-6911-4df9-a1e8-d64ba7547daf\") " pod="openstack/neutron-db-sync-65g8w" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.568081 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7m7tp\" (UniqueName: \"kubernetes.io/projected/f90a48fa-6911-4df9-a1e8-d64ba7547daf-kube-api-access-7m7tp\") pod \"neutron-db-sync-65g8w\" (UID: \"f90a48fa-6911-4df9-a1e8-d64ba7547daf\") " pod="openstack/neutron-db-sync-65g8w" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.568109 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f90a48fa-6911-4df9-a1e8-d64ba7547daf-combined-ca-bundle\") pod \"neutron-db-sync-65g8w\" (UID: 
\"f90a48fa-6911-4df9-a1e8-d64ba7547daf\") " pod="openstack/neutron-db-sync-65g8w" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.568164 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7e96f37-574f-4900-88f9-33dc41179807-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a7e96f37-574f-4900-88f9-33dc41179807\") " pod="openstack/ceilometer-0" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.568255 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7e96f37-574f-4900-88f9-33dc41179807-scripts\") pod \"ceilometer-0\" (UID: \"a7e96f37-574f-4900-88f9-33dc41179807\") " pod="openstack/ceilometer-0" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.568296 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7e96f37-574f-4900-88f9-33dc41179807-run-httpd\") pod \"ceilometer-0\" (UID: \"a7e96f37-574f-4900-88f9-33dc41179807\") " pod="openstack/ceilometer-0" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.568636 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7e96f37-574f-4900-88f9-33dc41179807-run-httpd\") pod \"ceilometer-0\" (UID: \"a7e96f37-574f-4900-88f9-33dc41179807\") " pod="openstack/ceilometer-0" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.581026 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7e96f37-574f-4900-88f9-33dc41179807-config-data\") pod \"ceilometer-0\" (UID: \"a7e96f37-574f-4900-88f9-33dc41179807\") " pod="openstack/ceilometer-0" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.583920 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/f90a48fa-6911-4df9-a1e8-d64ba7547daf-config\") pod \"neutron-db-sync-65g8w\" (UID: \"f90a48fa-6911-4df9-a1e8-d64ba7547daf\") " pod="openstack/neutron-db-sync-65g8w" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.585639 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f90a48fa-6911-4df9-a1e8-d64ba7547daf-combined-ca-bundle\") pod \"neutron-db-sync-65g8w\" (UID: \"f90a48fa-6911-4df9-a1e8-d64ba7547daf\") " pod="openstack/neutron-db-sync-65g8w" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.587893 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7e96f37-574f-4900-88f9-33dc41179807-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a7e96f37-574f-4900-88f9-33dc41179807\") " pod="openstack/ceilometer-0" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.605990 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a7e96f37-574f-4900-88f9-33dc41179807-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a7e96f37-574f-4900-88f9-33dc41179807\") " pod="openstack/ceilometer-0" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.606891 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7e96f37-574f-4900-88f9-33dc41179807-scripts\") pod \"ceilometer-0\" (UID: \"a7e96f37-574f-4900-88f9-33dc41179807\") " 
pod="openstack/ceilometer-0" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.609495 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7m7tp\" (UniqueName: \"kubernetes.io/projected/f90a48fa-6911-4df9-a1e8-d64ba7547daf-kube-api-access-7m7tp\") pod \"neutron-db-sync-65g8w\" (UID: \"f90a48fa-6911-4df9-a1e8-d64ba7547daf\") " pod="openstack/neutron-db-sync-65g8w" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.610019 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.613410 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g2srq\" (UniqueName: \"kubernetes.io/projected/a7e96f37-574f-4900-88f9-33dc41179807-kube-api-access-g2srq\") pod \"ceilometer-0\" (UID: \"a7e96f37-574f-4900-88f9-33dc41179807\") " pod="openstack/ceilometer-0" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.624664 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-tgz4r"] Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.633784 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-5f5b8cffb5-6gsfn"] Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.635095 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5f5b8cffb5-6gsfn" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.651425 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-srjbc"] Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.652592 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-srjbc" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.655755 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.655892 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-9kkj6" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.660921 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-27drm"] Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.664927 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-27drm" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.667808 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5f5b8cffb5-6gsfn"] Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.670352 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gpxxc\" (UniqueName: \"kubernetes.io/projected/ec11ae96-46e1-47a2-ae19-61941253ce7c-kube-api-access-gpxxc\") pod \"placement-db-sync-tgz4r\" (UID: \"ec11ae96-46e1-47a2-ae19-61941253ce7c\") " pod="openstack/placement-db-sync-tgz4r" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.670448 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec11ae96-46e1-47a2-ae19-61941253ce7c-config-data\") pod \"placement-db-sync-tgz4r\" (UID: \"ec11ae96-46e1-47a2-ae19-61941253ce7c\") " pod="openstack/placement-db-sync-tgz4r" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.671335 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec11ae96-46e1-47a2-ae19-61941253ce7c-logs\") pod \"placement-db-sync-tgz4r\" (UID: \"ec11ae96-46e1-47a2-ae19-61941253ce7c\") " pod="openstack/placement-db-sync-tgz4r" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.671418 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec11ae96-46e1-47a2-ae19-61941253ce7c-scripts\") pod \"placement-db-sync-tgz4r\" (UID: \"ec11ae96-46e1-47a2-ae19-61941253ce7c\") " pod="openstack/placement-db-sync-tgz4r" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.671454 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec11ae96-46e1-47a2-ae19-61941253ce7c-combined-ca-bundle\") pod \"placement-db-sync-tgz4r\" (UID: \"ec11ae96-46e1-47a2-ae19-61941253ce7c\") " pod="openstack/placement-db-sync-tgz4r" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.693854 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-srjbc"] Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.702836 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-27drm"] Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.707979 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.709847 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.717981 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6d767c7b5c-n5rr5" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.723700 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.724092 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.724373 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.724381 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-7g7gm" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.727138 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.775291 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-rhwbp" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.777844 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.805732 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.811421 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gpxxc\" (UniqueName: \"kubernetes.io/projected/ec11ae96-46e1-47a2-ae19-61941253ce7c-kube-api-access-gpxxc\") pod \"placement-db-sync-tgz4r\" (UID: \"ec11ae96-46e1-47a2-ae19-61941253ce7c\") " pod="openstack/placement-db-sync-tgz4r" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.811506 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/19be7f69-ce3a-4c28-8934-885d997016ff-dns-svc\") pod \"dnsmasq-dns-8b5c85b87-27drm\" (UID: \"19be7f69-ce3a-4c28-8934-885d997016ff\") " pod="openstack/dnsmasq-dns-8b5c85b87-27drm" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.811537 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3b688652-b82e-48df-8ffd-8d8234672564-logs\") pod \"horizon-5f5b8cffb5-6gsfn\" (UID: \"3b688652-b82e-48df-8ffd-8d8234672564\") " pod="openstack/horizon-5f5b8cffb5-6gsfn" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.811563 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19be7f69-ce3a-4c28-8934-885d997016ff-config\") pod \"dnsmasq-dns-8b5c85b87-27drm\" (UID: \"19be7f69-ce3a-4c28-8934-885d997016ff\") " pod="openstack/dnsmasq-dns-8b5c85b87-27drm" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.811656 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2bed5952-1a88-4314-befd-bb76c5431cdd-db-sync-config-data\") pod \"barbican-db-sync-srjbc\" (UID: \"2bed5952-1a88-4314-befd-bb76c5431cdd\") " pod="openstack/barbican-db-sync-srjbc" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.811692 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/ec11ae96-46e1-47a2-ae19-61941253ce7c-config-data\") pod \"placement-db-sync-tgz4r\" (UID: \"ec11ae96-46e1-47a2-ae19-61941253ce7c\") " pod="openstack/placement-db-sync-tgz4r" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.811729 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/19be7f69-ce3a-4c28-8934-885d997016ff-ovsdbserver-nb\") pod \"dnsmasq-dns-8b5c85b87-27drm\" (UID: \"19be7f69-ce3a-4c28-8934-885d997016ff\") " pod="openstack/dnsmasq-dns-8b5c85b87-27drm" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.811764 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec11ae96-46e1-47a2-ae19-61941253ce7c-logs\") pod \"placement-db-sync-tgz4r\" (UID: \"ec11ae96-46e1-47a2-ae19-61941253ce7c\") " pod="openstack/placement-db-sync-tgz4r" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.811809 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/19be7f69-ce3a-4c28-8934-885d997016ff-ovsdbserver-sb\") pod \"dnsmasq-dns-8b5c85b87-27drm\" (UID: \"19be7f69-ce3a-4c28-8934-885d997016ff\") " pod="openstack/dnsmasq-dns-8b5c85b87-27drm" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.811834 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3b688652-b82e-48df-8ffd-8d8234672564-config-data\") pod \"horizon-5f5b8cffb5-6gsfn\" (UID: \"3b688652-b82e-48df-8ffd-8d8234672564\") " pod="openstack/horizon-5f5b8cffb5-6gsfn" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.811856 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec11ae96-46e1-47a2-ae19-61941253ce7c-scripts\") pod \"placement-db-sync-tgz4r\" (UID: \"ec11ae96-46e1-47a2-ae19-61941253ce7c\") " pod="openstack/placement-db-sync-tgz4r" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.811884 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3b688652-b82e-48df-8ffd-8d8234672564-scripts\") pod \"horizon-5f5b8cffb5-6gsfn\" (UID: \"3b688652-b82e-48df-8ffd-8d8234672564\") " pod="openstack/horizon-5f5b8cffb5-6gsfn" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.811927 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec11ae96-46e1-47a2-ae19-61941253ce7c-combined-ca-bundle\") pod \"placement-db-sync-tgz4r\" (UID: \"ec11ae96-46e1-47a2-ae19-61941253ce7c\") " pod="openstack/placement-db-sync-tgz4r" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.811950 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5l2ss\" (UniqueName: \"kubernetes.io/projected/3b688652-b82e-48df-8ffd-8d8234672564-kube-api-access-5l2ss\") pod \"horizon-5f5b8cffb5-6gsfn\" (UID: \"3b688652-b82e-48df-8ffd-8d8234672564\") " pod="openstack/horizon-5f5b8cffb5-6gsfn" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.811968 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xsxjk\" (UniqueName: 
\"kubernetes.io/projected/19be7f69-ce3a-4c28-8934-885d997016ff-kube-api-access-xsxjk\") pod \"dnsmasq-dns-8b5c85b87-27drm\" (UID: \"19be7f69-ce3a-4c28-8934-885d997016ff\") " pod="openstack/dnsmasq-dns-8b5c85b87-27drm" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.812049 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/19be7f69-ce3a-4c28-8934-885d997016ff-dns-swift-storage-0\") pod \"dnsmasq-dns-8b5c85b87-27drm\" (UID: \"19be7f69-ce3a-4c28-8934-885d997016ff\") " pod="openstack/dnsmasq-dns-8b5c85b87-27drm" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.812077 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2bed5952-1a88-4314-befd-bb76c5431cdd-combined-ca-bundle\") pod \"barbican-db-sync-srjbc\" (UID: \"2bed5952-1a88-4314-befd-bb76c5431cdd\") " pod="openstack/barbican-db-sync-srjbc" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.812108 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/3b688652-b82e-48df-8ffd-8d8234672564-horizon-secret-key\") pod \"horizon-5f5b8cffb5-6gsfn\" (UID: \"3b688652-b82e-48df-8ffd-8d8234672564\") " pod="openstack/horizon-5f5b8cffb5-6gsfn" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.812148 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-485q7\" (UniqueName: \"kubernetes.io/projected/2bed5952-1a88-4314-befd-bb76c5431cdd-kube-api-access-485q7\") pod \"barbican-db-sync-srjbc\" (UID: \"2bed5952-1a88-4314-befd-bb76c5431cdd\") " pod="openstack/barbican-db-sync-srjbc" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.814228 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.814411 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.815597 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec11ae96-46e1-47a2-ae19-61941253ce7c-logs\") pod \"placement-db-sync-tgz4r\" (UID: \"ec11ae96-46e1-47a2-ae19-61941253ce7c\") " pod="openstack/placement-db-sync-tgz4r" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.840275 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.902844 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec11ae96-46e1-47a2-ae19-61941253ce7c-config-data\") pod \"placement-db-sync-tgz4r\" (UID: \"ec11ae96-46e1-47a2-ae19-61941253ce7c\") " pod="openstack/placement-db-sync-tgz4r" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.912413 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gpxxc\" (UniqueName: \"kubernetes.io/projected/ec11ae96-46e1-47a2-ae19-61941253ce7c-kube-api-access-gpxxc\") pod \"placement-db-sync-tgz4r\" (UID: \"ec11ae96-46e1-47a2-ae19-61941253ce7c\") " pod="openstack/placement-db-sync-tgz4r" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.913311 4755 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec11ae96-46e1-47a2-ae19-61941253ce7c-scripts\") pod \"placement-db-sync-tgz4r\" (UID: \"ec11ae96-46e1-47a2-ae19-61941253ce7c\") " pod="openstack/placement-db-sync-tgz4r" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.913958 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec11ae96-46e1-47a2-ae19-61941253ce7c-combined-ca-bundle\") pod \"placement-db-sync-tgz4r\" (UID: \"ec11ae96-46e1-47a2-ae19-61941253ce7c\") " pod="openstack/placement-db-sync-tgz4r" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.914485 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/19be7f69-ce3a-4c28-8934-885d997016ff-ovsdbserver-sb\") pod \"dnsmasq-dns-8b5c85b87-27drm\" (UID: \"19be7f69-ce3a-4c28-8934-885d997016ff\") " pod="openstack/dnsmasq-dns-8b5c85b87-27drm" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.914551 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3b688652-b82e-48df-8ffd-8d8234672564-config-data\") pod \"horizon-5f5b8cffb5-6gsfn\" (UID: \"3b688652-b82e-48df-8ffd-8d8234672564\") " pod="openstack/horizon-5f5b8cffb5-6gsfn" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.929862 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/19be7f69-ce3a-4c28-8934-885d997016ff-ovsdbserver-sb\") pod \"dnsmasq-dns-8b5c85b87-27drm\" (UID: \"19be7f69-ce3a-4c28-8934-885d997016ff\") " pod="openstack/dnsmasq-dns-8b5c85b87-27drm" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.930525 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-tgz4r" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.931418 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.933371 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3b688652-b82e-48df-8ffd-8d8234672564-config-data\") pod \"horizon-5f5b8cffb5-6gsfn\" (UID: \"3b688652-b82e-48df-8ffd-8d8234672564\") " pod="openstack/horizon-5f5b8cffb5-6gsfn" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.934230 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-65g8w" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.935597 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3b688652-b82e-48df-8ffd-8d8234672564-scripts\") pod \"horizon-5f5b8cffb5-6gsfn\" (UID: \"3b688652-b82e-48df-8ffd-8d8234672564\") " pod="openstack/horizon-5f5b8cffb5-6gsfn" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.935687 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3b688652-b82e-48df-8ffd-8d8234672564-scripts\") pod \"horizon-5f5b8cffb5-6gsfn\" (UID: \"3b688652-b82e-48df-8ffd-8d8234672564\") " pod="openstack/horizon-5f5b8cffb5-6gsfn" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.935788 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xsxjk\" (UniqueName: \"kubernetes.io/projected/19be7f69-ce3a-4c28-8934-885d997016ff-kube-api-access-xsxjk\") pod \"dnsmasq-dns-8b5c85b87-27drm\" (UID: \"19be7f69-ce3a-4c28-8934-885d997016ff\") " pod="openstack/dnsmasq-dns-8b5c85b87-27drm" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.935812 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5l2ss\" (UniqueName: \"kubernetes.io/projected/3b688652-b82e-48df-8ffd-8d8234672564-kube-api-access-5l2ss\") pod \"horizon-5f5b8cffb5-6gsfn\" (UID: \"3b688652-b82e-48df-8ffd-8d8234672564\") " pod="openstack/horizon-5f5b8cffb5-6gsfn" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.941555 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/19be7f69-ce3a-4c28-8934-885d997016ff-dns-swift-storage-0\") pod \"dnsmasq-dns-8b5c85b87-27drm\" (UID: \"19be7f69-ce3a-4c28-8934-885d997016ff\") " pod="openstack/dnsmasq-dns-8b5c85b87-27drm" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.941621 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2bed5952-1a88-4314-befd-bb76c5431cdd-combined-ca-bundle\") pod \"barbican-db-sync-srjbc\" (UID: \"2bed5952-1a88-4314-befd-bb76c5431cdd\") " pod="openstack/barbican-db-sync-srjbc" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.941664 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/3b688652-b82e-48df-8ffd-8d8234672564-horizon-secret-key\") pod \"horizon-5f5b8cffb5-6gsfn\" (UID: \"3b688652-b82e-48df-8ffd-8d8234672564\") " pod="openstack/horizon-5f5b8cffb5-6gsfn" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.941716 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-485q7\" (UniqueName: \"kubernetes.io/projected/2bed5952-1a88-4314-befd-bb76c5431cdd-kube-api-access-485q7\") pod \"barbican-db-sync-srjbc\" (UID: \"2bed5952-1a88-4314-befd-bb76c5431cdd\") " pod="openstack/barbican-db-sync-srjbc" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.941821 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/19be7f69-ce3a-4c28-8934-885d997016ff-dns-svc\") pod \"dnsmasq-dns-8b5c85b87-27drm\" (UID: \"19be7f69-ce3a-4c28-8934-885d997016ff\") " pod="openstack/dnsmasq-dns-8b5c85b87-27drm" Nov 24 01:28:28 crc kubenswrapper[4755]: 
I1124 01:28:28.941853 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3b688652-b82e-48df-8ffd-8d8234672564-logs\") pod \"horizon-5f5b8cffb5-6gsfn\" (UID: \"3b688652-b82e-48df-8ffd-8d8234672564\") " pod="openstack/horizon-5f5b8cffb5-6gsfn" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.941882 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19be7f69-ce3a-4c28-8934-885d997016ff-config\") pod \"dnsmasq-dns-8b5c85b87-27drm\" (UID: \"19be7f69-ce3a-4c28-8934-885d997016ff\") " pod="openstack/dnsmasq-dns-8b5c85b87-27drm" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.941970 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2bed5952-1a88-4314-befd-bb76c5431cdd-db-sync-config-data\") pod \"barbican-db-sync-srjbc\" (UID: \"2bed5952-1a88-4314-befd-bb76c5431cdd\") " pod="openstack/barbican-db-sync-srjbc" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.942034 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/19be7f69-ce3a-4c28-8934-885d997016ff-ovsdbserver-nb\") pod \"dnsmasq-dns-8b5c85b87-27drm\" (UID: \"19be7f69-ce3a-4c28-8934-885d997016ff\") " pod="openstack/dnsmasq-dns-8b5c85b87-27drm" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.945186 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/19be7f69-ce3a-4c28-8934-885d997016ff-dns-swift-storage-0\") pod \"dnsmasq-dns-8b5c85b87-27drm\" (UID: \"19be7f69-ce3a-4c28-8934-885d997016ff\") " pod="openstack/dnsmasq-dns-8b5c85b87-27drm" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.975786 4755 generic.go:334] "Generic (PLEG): container finished" podID="84a99ebb-2bb7-4562-87b2-e1f5467b51dc" containerID="80914558c2a9e76df2be248eb3a6db4af0431c26148e19a11a6bab7c7b6472df" exitCode=0 Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.975840 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7ff5475cc9-d6jwf" event={"ID":"84a99ebb-2bb7-4562-87b2-e1f5467b51dc","Type":"ContainerDied","Data":"80914558c2a9e76df2be248eb3a6db4af0431c26148e19a11a6bab7c7b6472df"} Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.975850 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19be7f69-ce3a-4c28-8934-885d997016ff-config\") pod \"dnsmasq-dns-8b5c85b87-27drm\" (UID: \"19be7f69-ce3a-4c28-8934-885d997016ff\") " pod="openstack/dnsmasq-dns-8b5c85b87-27drm" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.976246 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3b688652-b82e-48df-8ffd-8d8234672564-logs\") pod \"horizon-5f5b8cffb5-6gsfn\" (UID: \"3b688652-b82e-48df-8ffd-8d8234672564\") " pod="openstack/horizon-5f5b8cffb5-6gsfn" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.976676 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/19be7f69-ce3a-4c28-8934-885d997016ff-ovsdbserver-nb\") pod \"dnsmasq-dns-8b5c85b87-27drm\" (UID: \"19be7f69-ce3a-4c28-8934-885d997016ff\") " pod="openstack/dnsmasq-dns-8b5c85b87-27drm" Nov 24 01:28:28 crc kubenswrapper[4755]: 
I1124 01:28:28.987169 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xsxjk\" (UniqueName: \"kubernetes.io/projected/19be7f69-ce3a-4c28-8934-885d997016ff-kube-api-access-xsxjk\") pod \"dnsmasq-dns-8b5c85b87-27drm\" (UID: \"19be7f69-ce3a-4c28-8934-885d997016ff\") " pod="openstack/dnsmasq-dns-8b5c85b87-27drm" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.988960 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-485q7\" (UniqueName: \"kubernetes.io/projected/2bed5952-1a88-4314-befd-bb76c5431cdd-kube-api-access-485q7\") pod \"barbican-db-sync-srjbc\" (UID: \"2bed5952-1a88-4314-befd-bb76c5431cdd\") " pod="openstack/barbican-db-sync-srjbc" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.989353 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5l2ss\" (UniqueName: \"kubernetes.io/projected/3b688652-b82e-48df-8ffd-8d8234672564-kube-api-access-5l2ss\") pod \"horizon-5f5b8cffb5-6gsfn\" (UID: \"3b688652-b82e-48df-8ffd-8d8234672564\") " pod="openstack/horizon-5f5b8cffb5-6gsfn" Nov 24 01:28:28 crc kubenswrapper[4755]: I1124 01:28:28.989734 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/19be7f69-ce3a-4c28-8934-885d997016ff-dns-svc\") pod \"dnsmasq-dns-8b5c85b87-27drm\" (UID: \"19be7f69-ce3a-4c28-8934-885d997016ff\") " pod="openstack/dnsmasq-dns-8b5c85b87-27drm" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.019593 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/3b688652-b82e-48df-8ffd-8d8234672564-horizon-secret-key\") pod \"horizon-5f5b8cffb5-6gsfn\" (UID: \"3b688652-b82e-48df-8ffd-8d8234672564\") " pod="openstack/horizon-5f5b8cffb5-6gsfn" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.019918 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2bed5952-1a88-4314-befd-bb76c5431cdd-db-sync-config-data\") pod \"barbican-db-sync-srjbc\" (UID: \"2bed5952-1a88-4314-befd-bb76c5431cdd\") " pod="openstack/barbican-db-sync-srjbc" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.019971 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2bed5952-1a88-4314-befd-bb76c5431cdd-combined-ca-bundle\") pod \"barbican-db-sync-srjbc\" (UID: \"2bed5952-1a88-4314-befd-bb76c5431cdd\") " pod="openstack/barbican-db-sync-srjbc" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.035267 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-27drm" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.047106 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7ff5475cc9-d6jwf" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.077694 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/84a99ebb-2bb7-4562-87b2-e1f5467b51dc-ovsdbserver-sb\") pod \"84a99ebb-2bb7-4562-87b2-e1f5467b51dc\" (UID: \"84a99ebb-2bb7-4562-87b2-e1f5467b51dc\") " Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.077876 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nmtzg\" (UniqueName: \"kubernetes.io/projected/84a99ebb-2bb7-4562-87b2-e1f5467b51dc-kube-api-access-nmtzg\") pod \"84a99ebb-2bb7-4562-87b2-e1f5467b51dc\" (UID: \"84a99ebb-2bb7-4562-87b2-e1f5467b51dc\") " Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.077917 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/84a99ebb-2bb7-4562-87b2-e1f5467b51dc-ovsdbserver-nb\") pod \"84a99ebb-2bb7-4562-87b2-e1f5467b51dc\" (UID: \"84a99ebb-2bb7-4562-87b2-e1f5467b51dc\") " Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.077952 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/84a99ebb-2bb7-4562-87b2-e1f5467b51dc-dns-svc\") pod \"84a99ebb-2bb7-4562-87b2-e1f5467b51dc\" (UID: \"84a99ebb-2bb7-4562-87b2-e1f5467b51dc\") " Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.077995 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/84a99ebb-2bb7-4562-87b2-e1f5467b51dc-config\") pod \"84a99ebb-2bb7-4562-87b2-e1f5467b51dc\" (UID: \"84a99ebb-2bb7-4562-87b2-e1f5467b51dc\") " Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.078029 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/84a99ebb-2bb7-4562-87b2-e1f5467b51dc-dns-swift-storage-0\") pod \"84a99ebb-2bb7-4562-87b2-e1f5467b51dc\" (UID: \"84a99ebb-2bb7-4562-87b2-e1f5467b51dc\") " Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.082185 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7e9ef39d-434b-43f6-9a27-2635b2e93775-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"7e9ef39d-434b-43f6-9a27-2635b2e93775\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.082251 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"7e9ef39d-434b-43f6-9a27-2635b2e93775\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.082268 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e9ef39d-434b-43f6-9a27-2635b2e93775-config-data\") pod \"glance-default-internal-api-0\" (UID: \"7e9ef39d-434b-43f6-9a27-2635b2e93775\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.082364 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/7e9ef39d-434b-43f6-9a27-2635b2e93775-scripts\") pod \"glance-default-internal-api-0\" (UID: \"7e9ef39d-434b-43f6-9a27-2635b2e93775\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.082422 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e9ef39d-434b-43f6-9a27-2635b2e93775-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"7e9ef39d-434b-43f6-9a27-2635b2e93775\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.082470 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"5a367868-ca1d-48df-93a9-c8104ca535d9\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.082491 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5a367868-ca1d-48df-93a9-c8104ca535d9-scripts\") pod \"glance-default-external-api-0\" (UID: \"5a367868-ca1d-48df-93a9-c8104ca535d9\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.082556 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e9ef39d-434b-43f6-9a27-2635b2e93775-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"7e9ef39d-434b-43f6-9a27-2635b2e93775\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.082632 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sf5ns\" (UniqueName: \"kubernetes.io/projected/5a367868-ca1d-48df-93a9-c8104ca535d9-kube-api-access-sf5ns\") pod \"glance-default-external-api-0\" (UID: \"5a367868-ca1d-48df-93a9-c8104ca535d9\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.082759 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a367868-ca1d-48df-93a9-c8104ca535d9-logs\") pod \"glance-default-external-api-0\" (UID: \"5a367868-ca1d-48df-93a9-c8104ca535d9\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.082791 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a367868-ca1d-48df-93a9-c8104ca535d9-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"5a367868-ca1d-48df-93a9-c8104ca535d9\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.083157 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5a367868-ca1d-48df-93a9-c8104ca535d9-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"5a367868-ca1d-48df-93a9-c8104ca535d9\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.083345 4755 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7e9ef39d-434b-43f6-9a27-2635b2e93775-logs\") pod \"glance-default-internal-api-0\" (UID: \"7e9ef39d-434b-43f6-9a27-2635b2e93775\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.083392 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a367868-ca1d-48df-93a9-c8104ca535d9-config-data\") pod \"glance-default-external-api-0\" (UID: \"5a367868-ca1d-48df-93a9-c8104ca535d9\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.083415 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fpzc6\" (UniqueName: \"kubernetes.io/projected/7e9ef39d-434b-43f6-9a27-2635b2e93775-kube-api-access-fpzc6\") pod \"glance-default-internal-api-0\" (UID: \"7e9ef39d-434b-43f6-9a27-2635b2e93775\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.083466 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a367868-ca1d-48df-93a9-c8104ca535d9-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"5a367868-ca1d-48df-93a9-c8104ca535d9\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.092212 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84a99ebb-2bb7-4562-87b2-e1f5467b51dc-kube-api-access-nmtzg" (OuterVolumeSpecName: "kube-api-access-nmtzg") pod "84a99ebb-2bb7-4562-87b2-e1f5467b51dc" (UID: "84a99ebb-2bb7-4562-87b2-e1f5467b51dc"). InnerVolumeSpecName "kube-api-access-nmtzg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.184957 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5a367868-ca1d-48df-93a9-c8104ca535d9-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"5a367868-ca1d-48df-93a9-c8104ca535d9\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.185002 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7e9ef39d-434b-43f6-9a27-2635b2e93775-logs\") pod \"glance-default-internal-api-0\" (UID: \"7e9ef39d-434b-43f6-9a27-2635b2e93775\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.185024 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fpzc6\" (UniqueName: \"kubernetes.io/projected/7e9ef39d-434b-43f6-9a27-2635b2e93775-kube-api-access-fpzc6\") pod \"glance-default-internal-api-0\" (UID: \"7e9ef39d-434b-43f6-9a27-2635b2e93775\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.185042 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a367868-ca1d-48df-93a9-c8104ca535d9-config-data\") pod \"glance-default-external-api-0\" (UID: \"5a367868-ca1d-48df-93a9-c8104ca535d9\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.185063 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a367868-ca1d-48df-93a9-c8104ca535d9-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"5a367868-ca1d-48df-93a9-c8104ca535d9\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.185092 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7e9ef39d-434b-43f6-9a27-2635b2e93775-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"7e9ef39d-434b-43f6-9a27-2635b2e93775\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.185110 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e9ef39d-434b-43f6-9a27-2635b2e93775-config-data\") pod \"glance-default-internal-api-0\" (UID: \"7e9ef39d-434b-43f6-9a27-2635b2e93775\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.185138 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"7e9ef39d-434b-43f6-9a27-2635b2e93775\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.185169 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e9ef39d-434b-43f6-9a27-2635b2e93775-scripts\") pod \"glance-default-internal-api-0\" (UID: \"7e9ef39d-434b-43f6-9a27-2635b2e93775\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.185191 
4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e9ef39d-434b-43f6-9a27-2635b2e93775-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"7e9ef39d-434b-43f6-9a27-2635b2e93775\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.185212 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"5a367868-ca1d-48df-93a9-c8104ca535d9\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.185229 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5a367868-ca1d-48df-93a9-c8104ca535d9-scripts\") pod \"glance-default-external-api-0\" (UID: \"5a367868-ca1d-48df-93a9-c8104ca535d9\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.185255 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e9ef39d-434b-43f6-9a27-2635b2e93775-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"7e9ef39d-434b-43f6-9a27-2635b2e93775\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.185279 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sf5ns\" (UniqueName: \"kubernetes.io/projected/5a367868-ca1d-48df-93a9-c8104ca535d9-kube-api-access-sf5ns\") pod \"glance-default-external-api-0\" (UID: \"5a367868-ca1d-48df-93a9-c8104ca535d9\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.185319 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a367868-ca1d-48df-93a9-c8104ca535d9-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"5a367868-ca1d-48df-93a9-c8104ca535d9\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.185339 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a367868-ca1d-48df-93a9-c8104ca535d9-logs\") pod \"glance-default-external-api-0\" (UID: \"5a367868-ca1d-48df-93a9-c8104ca535d9\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.185385 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nmtzg\" (UniqueName: \"kubernetes.io/projected/84a99ebb-2bb7-4562-87b2-e1f5467b51dc-kube-api-access-nmtzg\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.185760 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a367868-ca1d-48df-93a9-c8104ca535d9-logs\") pod \"glance-default-external-api-0\" (UID: \"5a367868-ca1d-48df-93a9-c8104ca535d9\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.185989 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5a367868-ca1d-48df-93a9-c8104ca535d9-httpd-run\") pod 
\"glance-default-external-api-0\" (UID: \"5a367868-ca1d-48df-93a9-c8104ca535d9\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.186312 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7e9ef39d-434b-43f6-9a27-2635b2e93775-logs\") pod \"glance-default-internal-api-0\" (UID: \"7e9ef39d-434b-43f6-9a27-2635b2e93775\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.188117 4755 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"5a367868-ca1d-48df-93a9-c8104ca535d9\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/glance-default-external-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.198229 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e9ef39d-434b-43f6-9a27-2635b2e93775-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"7e9ef39d-434b-43f6-9a27-2635b2e93775\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.199177 4755 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"7e9ef39d-434b-43f6-9a27-2635b2e93775\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/glance-default-internal-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.200156 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7e9ef39d-434b-43f6-9a27-2635b2e93775-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"7e9ef39d-434b-43f6-9a27-2635b2e93775\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.207706 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e9ef39d-434b-43f6-9a27-2635b2e93775-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"7e9ef39d-434b-43f6-9a27-2635b2e93775\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.211557 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sf5ns\" (UniqueName: \"kubernetes.io/projected/5a367868-ca1d-48df-93a9-c8104ca535d9-kube-api-access-sf5ns\") pod \"glance-default-external-api-0\" (UID: \"5a367868-ca1d-48df-93a9-c8104ca535d9\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.212583 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fpzc6\" (UniqueName: \"kubernetes.io/projected/7e9ef39d-434b-43f6-9a27-2635b2e93775-kube-api-access-fpzc6\") pod \"glance-default-internal-api-0\" (UID: \"7e9ef39d-434b-43f6-9a27-2635b2e93775\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.213865 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e9ef39d-434b-43f6-9a27-2635b2e93775-scripts\") pod \"glance-default-internal-api-0\" (UID: 
\"7e9ef39d-434b-43f6-9a27-2635b2e93775\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.216134 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a367868-ca1d-48df-93a9-c8104ca535d9-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"5a367868-ca1d-48df-93a9-c8104ca535d9\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.217043 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a367868-ca1d-48df-93a9-c8104ca535d9-config-data\") pod \"glance-default-external-api-0\" (UID: \"5a367868-ca1d-48df-93a9-c8104ca535d9\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.219038 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5a367868-ca1d-48df-93a9-c8104ca535d9-scripts\") pod \"glance-default-external-api-0\" (UID: \"5a367868-ca1d-48df-93a9-c8104ca535d9\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.224657 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e9ef39d-434b-43f6-9a27-2635b2e93775-config-data\") pod \"glance-default-internal-api-0\" (UID: \"7e9ef39d-434b-43f6-9a27-2635b2e93775\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.232637 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a367868-ca1d-48df-93a9-c8104ca535d9-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"5a367868-ca1d-48df-93a9-c8104ca535d9\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.263404 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/84a99ebb-2bb7-4562-87b2-e1f5467b51dc-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "84a99ebb-2bb7-4562-87b2-e1f5467b51dc" (UID: "84a99ebb-2bb7-4562-87b2-e1f5467b51dc"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.269570 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5f5b8cffb5-6gsfn" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.284955 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-mfpvq"] Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.286447 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/84a99ebb-2bb7-4562-87b2-e1f5467b51dc-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "84a99ebb-2bb7-4562-87b2-e1f5467b51dc" (UID: "84a99ebb-2bb7-4562-87b2-e1f5467b51dc"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.288346 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/84a99ebb-2bb7-4562-87b2-e1f5467b51dc-ovsdbserver-nb\") pod \"84a99ebb-2bb7-4562-87b2-e1f5467b51dc\" (UID: \"84a99ebb-2bb7-4562-87b2-e1f5467b51dc\") " Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.289214 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/84a99ebb-2bb7-4562-87b2-e1f5467b51dc-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:29 crc kubenswrapper[4755]: W1124 01:28:29.289298 4755 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/84a99ebb-2bb7-4562-87b2-e1f5467b51dc/volumes/kubernetes.io~configmap/ovsdbserver-nb Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.289309 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/84a99ebb-2bb7-4562-87b2-e1f5467b51dc-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "84a99ebb-2bb7-4562-87b2-e1f5467b51dc" (UID: "84a99ebb-2bb7-4562-87b2-e1f5467b51dc"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.290866 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-srjbc" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.302716 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/84a99ebb-2bb7-4562-87b2-e1f5467b51dc-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "84a99ebb-2bb7-4562-87b2-e1f5467b51dc" (UID: "84a99ebb-2bb7-4562-87b2-e1f5467b51dc"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.305520 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/84a99ebb-2bb7-4562-87b2-e1f5467b51dc-config" (OuterVolumeSpecName: "config") pod "84a99ebb-2bb7-4562-87b2-e1f5467b51dc" (UID: "84a99ebb-2bb7-4562-87b2-e1f5467b51dc"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.308357 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"7e9ef39d-434b-43f6-9a27-2635b2e93775\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.335046 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/84a99ebb-2bb7-4562-87b2-e1f5467b51dc-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "84a99ebb-2bb7-4562-87b2-e1f5467b51dc" (UID: "84a99ebb-2bb7-4562-87b2-e1f5467b51dc"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.373294 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"5a367868-ca1d-48df-93a9-c8104ca535d9\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.393254 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/84a99ebb-2bb7-4562-87b2-e1f5467b51dc-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.393286 4755 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/84a99ebb-2bb7-4562-87b2-e1f5467b51dc-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.393299 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/84a99ebb-2bb7-4562-87b2-e1f5467b51dc-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.393318 4755 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/84a99ebb-2bb7-4562-87b2-e1f5467b51dc-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.474688 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-rhwbp"] Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.485856 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-c6z97"] Nov 24 01:28:29 crc kubenswrapper[4755]: W1124 01:28:29.503284 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod261fa528_26f9_4f97_93ba_dd0f4a7e6260.slice/crio-934e827f97fdd2529ef0af209773695b57e173943cce36281cf7695848f73a98 WatchSource:0}: Error finding container 934e827f97fdd2529ef0af209773695b57e173943cce36281cf7695848f73a98: Status 404 returned error can't find the container with id 934e827f97fdd2529ef0af209773695b57e173943cce36281cf7695848f73a98 Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.551038 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.636281 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.638517 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6d767c7b5c-n5rr5"] Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.750351 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-tgz4r"] Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.762101 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-65g8w"] Nov 24 01:28:29 crc kubenswrapper[4755]: W1124 01:28:29.775219 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf90a48fa_6911_4df9_a1e8_d64ba7547daf.slice/crio-f8f6b49cff050f3fa7b6f37765122190407c2ab4f087e3b00068259c6dede6bc WatchSource:0}: Error finding container f8f6b49cff050f3fa7b6f37765122190407c2ab4f087e3b00068259c6dede6bc: Status 404 returned error can't find the container with id f8f6b49cff050f3fa7b6f37765122190407c2ab4f087e3b00068259c6dede6bc Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.786064 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-27drm"] Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.819203 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 24 01:28:29 crc kubenswrapper[4755]: I1124 01:28:29.987240 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6d767c7b5c-n5rr5" event={"ID":"9005ec18-506f-4b7c-a06c-7d4d619e3732","Type":"ContainerStarted","Data":"eaf344b5644eca6502bd1ff588586003d4442b15deabc057b5757cd134099bd8"} Nov 24 01:28:30 crc kubenswrapper[4755]: I1124 01:28:30.002366 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7ff5475cc9-d6jwf" Nov 24 01:28:30 crc kubenswrapper[4755]: I1124 01:28:30.012251 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7ff5475cc9-d6jwf" event={"ID":"84a99ebb-2bb7-4562-87b2-e1f5467b51dc","Type":"ContainerDied","Data":"fb35c7627518d65c7425c768f1f4345dc09c2d41ad08cb49daf4fb5fe2052780"} Nov 24 01:28:30 crc kubenswrapper[4755]: I1124 01:28:30.012297 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-rhwbp" event={"ID":"aea62103-9b85-495d-bb71-3c69c02a3000","Type":"ContainerStarted","Data":"9f6967441fce78e2fd7c5c4321739bfec9f5680932a44f5e113785512d133c6f"} Nov 24 01:28:30 crc kubenswrapper[4755]: I1124 01:28:30.012312 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-mfpvq" event={"ID":"6354cb83-933d-4b3f-b922-ce9e4f94e123","Type":"ContainerStarted","Data":"8148e6250c00a97e6011644b1457a767cd2a682ac0406bba9dbb0da688fde8ae"} Nov 24 01:28:30 crc kubenswrapper[4755]: I1124 01:28:30.012327 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-mfpvq" event={"ID":"6354cb83-933d-4b3f-b922-ce9e4f94e123","Type":"ContainerStarted","Data":"d48f1ee6f53a8336fc8a8fcf6877df8e2cfd39f1d7df1fcdc746dd99582cb18c"} Nov 24 01:28:30 crc kubenswrapper[4755]: I1124 01:28:30.012346 4755 scope.go:117] "RemoveContainer" containerID="80914558c2a9e76df2be248eb3a6db4af0431c26148e19a11a6bab7c7b6472df" Nov 24 01:28:30 crc kubenswrapper[4755]: I1124 01:28:30.017592 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-65g8w" event={"ID":"f90a48fa-6911-4df9-a1e8-d64ba7547daf","Type":"ContainerStarted","Data":"f8f6b49cff050f3fa7b6f37765122190407c2ab4f087e3b00068259c6dede6bc"} Nov 24 01:28:30 crc kubenswrapper[4755]: I1124 01:28:30.018890 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-tgz4r" event={"ID":"ec11ae96-46e1-47a2-ae19-61941253ce7c","Type":"ContainerStarted","Data":"09154f26ddb8a0ea2894ce5fbc142ad4a70b2c5e0267872d5f6dc1a415590e8d"} Nov 24 01:28:30 crc kubenswrapper[4755]: I1124 01:28:30.019890 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7e96f37-574f-4900-88f9-33dc41179807","Type":"ContainerStarted","Data":"ebbf79190f0a169d1c0adcac9cd978533892aa967b298eecac8c4f32ee983f71"} Nov 24 01:28:30 crc kubenswrapper[4755]: I1124 01:28:30.021130 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-27drm" event={"ID":"19be7f69-ce3a-4c28-8934-885d997016ff","Type":"ContainerStarted","Data":"7815090864a33006b8118ff978ebf97aa09f038dcfdff5d89e55692fc01bc6e4"} Nov 24 01:28:30 crc kubenswrapper[4755]: I1124 01:28:30.022188 4755 generic.go:334] "Generic (PLEG): container finished" podID="261fa528-26f9-4f97-93ba-dd0f4a7e6260" containerID="a777b79a156c03a79e30dfa9e685825a860d748aa658cd1be747df69174e781f" exitCode=0 Nov 24 01:28:30 crc kubenswrapper[4755]: I1124 01:28:30.022215 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c5cc7c5ff-c6z97" event={"ID":"261fa528-26f9-4f97-93ba-dd0f4a7e6260","Type":"ContainerDied","Data":"a777b79a156c03a79e30dfa9e685825a860d748aa658cd1be747df69174e781f"} Nov 24 01:28:30 crc kubenswrapper[4755]: I1124 01:28:30.022230 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c5cc7c5ff-c6z97" 
event={"ID":"261fa528-26f9-4f97-93ba-dd0f4a7e6260","Type":"ContainerStarted","Data":"934e827f97fdd2529ef0af209773695b57e173943cce36281cf7695848f73a98"} Nov 24 01:28:30 crc kubenswrapper[4755]: I1124 01:28:30.029430 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-mfpvq" podStartSLOduration=2.029420168 podStartE2EDuration="2.029420168s" podCreationTimestamp="2025-11-24 01:28:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:28:30.028206724 +0000 UTC m=+934.714272225" watchObservedRunningTime="2025-11-24 01:28:30.029420168 +0000 UTC m=+934.715485669" Nov 24 01:28:30 crc kubenswrapper[4755]: I1124 01:28:30.058870 4755 scope.go:117] "RemoveContainer" containerID="0d040e626ea303c02f254781de8453c7eae5f73f34d194ae8a6ec720f2ba12ca" Nov 24 01:28:30 crc kubenswrapper[4755]: I1124 01:28:30.084437 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-d6jwf"] Nov 24 01:28:30 crc kubenswrapper[4755]: I1124 01:28:30.092574 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-d6jwf"] Nov 24 01:28:30 crc kubenswrapper[4755]: W1124 01:28:30.130534 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2bed5952_1a88_4314_befd_bb76c5431cdd.slice/crio-5b9b1ec96f8af8453f710d26619dac4942703437586b131f3dbda8a03666e5ac WatchSource:0}: Error finding container 5b9b1ec96f8af8453f710d26619dac4942703437586b131f3dbda8a03666e5ac: Status 404 returned error can't find the container with id 5b9b1ec96f8af8453f710d26619dac4942703437586b131f3dbda8a03666e5ac Nov 24 01:28:30 crc kubenswrapper[4755]: I1124 01:28:30.147195 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5f5b8cffb5-6gsfn"] Nov 24 01:28:30 crc kubenswrapper[4755]: I1124 01:28:30.159751 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-srjbc"] Nov 24 01:28:30 crc kubenswrapper[4755]: I1124 01:28:30.314988 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 24 01:28:30 crc kubenswrapper[4755]: W1124 01:28:30.321729 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5a367868_ca1d_48df_93a9_c8104ca535d9.slice/crio-fa4118d48714516817d5552a212c5994f372c44e3de166a04ff50792c5d28435 WatchSource:0}: Error finding container fa4118d48714516817d5552a212c5994f372c44e3de166a04ff50792c5d28435: Status 404 returned error can't find the container with id fa4118d48714516817d5552a212c5994f372c44e3de166a04ff50792c5d28435 Nov 24 01:28:30 crc kubenswrapper[4755]: I1124 01:28:30.409979 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 24 01:28:30 crc kubenswrapper[4755]: I1124 01:28:30.515558 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c5cc7c5ff-c6z97" Nov 24 01:28:30 crc kubenswrapper[4755]: I1124 01:28:30.650692 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/261fa528-26f9-4f97-93ba-dd0f4a7e6260-dns-swift-storage-0\") pod \"261fa528-26f9-4f97-93ba-dd0f4a7e6260\" (UID: \"261fa528-26f9-4f97-93ba-dd0f4a7e6260\") " Nov 24 01:28:30 crc kubenswrapper[4755]: I1124 01:28:30.650758 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/261fa528-26f9-4f97-93ba-dd0f4a7e6260-dns-svc\") pod \"261fa528-26f9-4f97-93ba-dd0f4a7e6260\" (UID: \"261fa528-26f9-4f97-93ba-dd0f4a7e6260\") " Nov 24 01:28:30 crc kubenswrapper[4755]: I1124 01:28:30.650854 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/261fa528-26f9-4f97-93ba-dd0f4a7e6260-ovsdbserver-sb\") pod \"261fa528-26f9-4f97-93ba-dd0f4a7e6260\" (UID: \"261fa528-26f9-4f97-93ba-dd0f4a7e6260\") " Nov 24 01:28:30 crc kubenswrapper[4755]: I1124 01:28:30.650900 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hjtc6\" (UniqueName: \"kubernetes.io/projected/261fa528-26f9-4f97-93ba-dd0f4a7e6260-kube-api-access-hjtc6\") pod \"261fa528-26f9-4f97-93ba-dd0f4a7e6260\" (UID: \"261fa528-26f9-4f97-93ba-dd0f4a7e6260\") " Nov 24 01:28:30 crc kubenswrapper[4755]: I1124 01:28:30.650924 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/261fa528-26f9-4f97-93ba-dd0f4a7e6260-ovsdbserver-nb\") pod \"261fa528-26f9-4f97-93ba-dd0f4a7e6260\" (UID: \"261fa528-26f9-4f97-93ba-dd0f4a7e6260\") " Nov 24 01:28:30 crc kubenswrapper[4755]: I1124 01:28:30.650974 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/261fa528-26f9-4f97-93ba-dd0f4a7e6260-config\") pod \"261fa528-26f9-4f97-93ba-dd0f4a7e6260\" (UID: \"261fa528-26f9-4f97-93ba-dd0f4a7e6260\") " Nov 24 01:28:30 crc kubenswrapper[4755]: I1124 01:28:30.658842 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/261fa528-26f9-4f97-93ba-dd0f4a7e6260-kube-api-access-hjtc6" (OuterVolumeSpecName: "kube-api-access-hjtc6") pod "261fa528-26f9-4f97-93ba-dd0f4a7e6260" (UID: "261fa528-26f9-4f97-93ba-dd0f4a7e6260"). InnerVolumeSpecName "kube-api-access-hjtc6". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:28:30 crc kubenswrapper[4755]: I1124 01:28:30.683443 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/261fa528-26f9-4f97-93ba-dd0f4a7e6260-config" (OuterVolumeSpecName: "config") pod "261fa528-26f9-4f97-93ba-dd0f4a7e6260" (UID: "261fa528-26f9-4f97-93ba-dd0f4a7e6260"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:28:30 crc kubenswrapper[4755]: I1124 01:28:30.684220 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/261fa528-26f9-4f97-93ba-dd0f4a7e6260-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "261fa528-26f9-4f97-93ba-dd0f4a7e6260" (UID: "261fa528-26f9-4f97-93ba-dd0f4a7e6260"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:28:30 crc kubenswrapper[4755]: I1124 01:28:30.690271 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/261fa528-26f9-4f97-93ba-dd0f4a7e6260-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "261fa528-26f9-4f97-93ba-dd0f4a7e6260" (UID: "261fa528-26f9-4f97-93ba-dd0f4a7e6260"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:28:30 crc kubenswrapper[4755]: I1124 01:28:30.694979 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/261fa528-26f9-4f97-93ba-dd0f4a7e6260-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "261fa528-26f9-4f97-93ba-dd0f4a7e6260" (UID: "261fa528-26f9-4f97-93ba-dd0f4a7e6260"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:28:30 crc kubenswrapper[4755]: I1124 01:28:30.698252 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/261fa528-26f9-4f97-93ba-dd0f4a7e6260-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "261fa528-26f9-4f97-93ba-dd0f4a7e6260" (UID: "261fa528-26f9-4f97-93ba-dd0f4a7e6260"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:28:30 crc kubenswrapper[4755]: I1124 01:28:30.753187 4755 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/261fa528-26f9-4f97-93ba-dd0f4a7e6260-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:30 crc kubenswrapper[4755]: I1124 01:28:30.753225 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/261fa528-26f9-4f97-93ba-dd0f4a7e6260-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:30 crc kubenswrapper[4755]: I1124 01:28:30.753239 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hjtc6\" (UniqueName: \"kubernetes.io/projected/261fa528-26f9-4f97-93ba-dd0f4a7e6260-kube-api-access-hjtc6\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:30 crc kubenswrapper[4755]: I1124 01:28:30.753252 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/261fa528-26f9-4f97-93ba-dd0f4a7e6260-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:30 crc kubenswrapper[4755]: I1124 01:28:30.753264 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/261fa528-26f9-4f97-93ba-dd0f4a7e6260-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:30 crc kubenswrapper[4755]: I1124 01:28:30.753275 4755 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/261fa528-26f9-4f97-93ba-dd0f4a7e6260-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:30 crc kubenswrapper[4755]: I1124 01:28:30.933467 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 24 01:28:30 crc kubenswrapper[4755]: I1124 01:28:30.976177 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 24 01:28:30 crc kubenswrapper[4755]: I1124 01:28:30.991468 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5f5b8cffb5-6gsfn"] Nov 24 01:28:31 crc kubenswrapper[4755]: I1124 01:28:31.016220 4755 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/horizon-668d8db6fc-jzr8v"] Nov 24 01:28:31 crc kubenswrapper[4755]: E1124 01:28:31.016567 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84a99ebb-2bb7-4562-87b2-e1f5467b51dc" containerName="dnsmasq-dns" Nov 24 01:28:31 crc kubenswrapper[4755]: I1124 01:28:31.016579 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="84a99ebb-2bb7-4562-87b2-e1f5467b51dc" containerName="dnsmasq-dns" Nov 24 01:28:31 crc kubenswrapper[4755]: E1124 01:28:31.016616 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="261fa528-26f9-4f97-93ba-dd0f4a7e6260" containerName="init" Nov 24 01:28:31 crc kubenswrapper[4755]: I1124 01:28:31.016623 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="261fa528-26f9-4f97-93ba-dd0f4a7e6260" containerName="init" Nov 24 01:28:31 crc kubenswrapper[4755]: E1124 01:28:31.016646 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84a99ebb-2bb7-4562-87b2-e1f5467b51dc" containerName="init" Nov 24 01:28:31 crc kubenswrapper[4755]: I1124 01:28:31.016652 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="84a99ebb-2bb7-4562-87b2-e1f5467b51dc" containerName="init" Nov 24 01:28:31 crc kubenswrapper[4755]: I1124 01:28:31.016804 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="261fa528-26f9-4f97-93ba-dd0f4a7e6260" containerName="init" Nov 24 01:28:31 crc kubenswrapper[4755]: I1124 01:28:31.016864 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="84a99ebb-2bb7-4562-87b2-e1f5467b51dc" containerName="dnsmasq-dns" Nov 24 01:28:31 crc kubenswrapper[4755]: I1124 01:28:31.017929 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-668d8db6fc-jzr8v" Nov 24 01:28:31 crc kubenswrapper[4755]: I1124 01:28:31.056023 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-668d8db6fc-jzr8v"] Nov 24 01:28:31 crc kubenswrapper[4755]: I1124 01:28:31.058204 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/7b4dc449-172f-46cf-9e94-33827628c742-horizon-secret-key\") pod \"horizon-668d8db6fc-jzr8v\" (UID: \"7b4dc449-172f-46cf-9e94-33827628c742\") " pod="openstack/horizon-668d8db6fc-jzr8v" Nov 24 01:28:31 crc kubenswrapper[4755]: I1124 01:28:31.058294 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7b4dc449-172f-46cf-9e94-33827628c742-config-data\") pod \"horizon-668d8db6fc-jzr8v\" (UID: \"7b4dc449-172f-46cf-9e94-33827628c742\") " pod="openstack/horizon-668d8db6fc-jzr8v" Nov 24 01:28:31 crc kubenswrapper[4755]: I1124 01:28:31.058324 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h976n\" (UniqueName: \"kubernetes.io/projected/7b4dc449-172f-46cf-9e94-33827628c742-kube-api-access-h976n\") pod \"horizon-668d8db6fc-jzr8v\" (UID: \"7b4dc449-172f-46cf-9e94-33827628c742\") " pod="openstack/horizon-668d8db6fc-jzr8v" Nov 24 01:28:31 crc kubenswrapper[4755]: I1124 01:28:31.058350 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7b4dc449-172f-46cf-9e94-33827628c742-scripts\") pod \"horizon-668d8db6fc-jzr8v\" (UID: \"7b4dc449-172f-46cf-9e94-33827628c742\") " pod="openstack/horizon-668d8db6fc-jzr8v" Nov 24 01:28:31 crc kubenswrapper[4755]: 
I1124 01:28:31.058393 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b4dc449-172f-46cf-9e94-33827628c742-logs\") pod \"horizon-668d8db6fc-jzr8v\" (UID: \"7b4dc449-172f-46cf-9e94-33827628c742\") " pod="openstack/horizon-668d8db6fc-jzr8v" Nov 24 01:28:31 crc kubenswrapper[4755]: I1124 01:28:31.070259 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 24 01:28:31 crc kubenswrapper[4755]: I1124 01:28:31.094388 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c5cc7c5ff-c6z97" event={"ID":"261fa528-26f9-4f97-93ba-dd0f4a7e6260","Type":"ContainerDied","Data":"934e827f97fdd2529ef0af209773695b57e173943cce36281cf7695848f73a98"} Nov 24 01:28:31 crc kubenswrapper[4755]: I1124 01:28:31.094438 4755 scope.go:117] "RemoveContainer" containerID="a777b79a156c03a79e30dfa9e685825a860d748aa658cd1be747df69174e781f" Nov 24 01:28:31 crc kubenswrapper[4755]: I1124 01:28:31.094550 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c5cc7c5ff-c6z97" Nov 24 01:28:31 crc kubenswrapper[4755]: I1124 01:28:31.105774 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-65g8w" event={"ID":"f90a48fa-6911-4df9-a1e8-d64ba7547daf","Type":"ContainerStarted","Data":"448928381bf34cd7e310c0d907ab37d7068898962c209f6742fa685befbcf1f8"} Nov 24 01:28:31 crc kubenswrapper[4755]: I1124 01:28:31.127249 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-65g8w" podStartSLOduration=3.127230518 podStartE2EDuration="3.127230518s" podCreationTimestamp="2025-11-24 01:28:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:28:31.122016741 +0000 UTC m=+935.808082242" watchObservedRunningTime="2025-11-24 01:28:31.127230518 +0000 UTC m=+935.813296019" Nov 24 01:28:31 crc kubenswrapper[4755]: I1124 01:28:31.137118 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-srjbc" event={"ID":"2bed5952-1a88-4314-befd-bb76c5431cdd","Type":"ContainerStarted","Data":"5b9b1ec96f8af8453f710d26619dac4942703437586b131f3dbda8a03666e5ac"} Nov 24 01:28:31 crc kubenswrapper[4755]: I1124 01:28:31.139851 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5f5b8cffb5-6gsfn" event={"ID":"3b688652-b82e-48df-8ffd-8d8234672564","Type":"ContainerStarted","Data":"12aa89ac1c1bd2cbf994621b93524156943ccead16db9f7ba612973f1f8cd120"} Nov 24 01:28:31 crc kubenswrapper[4755]: I1124 01:28:31.140978 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5a367868-ca1d-48df-93a9-c8104ca535d9","Type":"ContainerStarted","Data":"fa4118d48714516817d5552a212c5994f372c44e3de166a04ff50792c5d28435"} Nov 24 01:28:31 crc kubenswrapper[4755]: I1124 01:28:31.160689 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7b4dc449-172f-46cf-9e94-33827628c742-scripts\") pod \"horizon-668d8db6fc-jzr8v\" (UID: \"7b4dc449-172f-46cf-9e94-33827628c742\") " pod="openstack/horizon-668d8db6fc-jzr8v" Nov 24 01:28:31 crc kubenswrapper[4755]: I1124 01:28:31.160773 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/7b4dc449-172f-46cf-9e94-33827628c742-logs\") pod \"horizon-668d8db6fc-jzr8v\" (UID: \"7b4dc449-172f-46cf-9e94-33827628c742\") " pod="openstack/horizon-668d8db6fc-jzr8v" Nov 24 01:28:31 crc kubenswrapper[4755]: I1124 01:28:31.160852 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/7b4dc449-172f-46cf-9e94-33827628c742-horizon-secret-key\") pod \"horizon-668d8db6fc-jzr8v\" (UID: \"7b4dc449-172f-46cf-9e94-33827628c742\") " pod="openstack/horizon-668d8db6fc-jzr8v" Nov 24 01:28:31 crc kubenswrapper[4755]: I1124 01:28:31.160924 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7b4dc449-172f-46cf-9e94-33827628c742-config-data\") pod \"horizon-668d8db6fc-jzr8v\" (UID: \"7b4dc449-172f-46cf-9e94-33827628c742\") " pod="openstack/horizon-668d8db6fc-jzr8v" Nov 24 01:28:31 crc kubenswrapper[4755]: I1124 01:28:31.160949 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h976n\" (UniqueName: \"kubernetes.io/projected/7b4dc449-172f-46cf-9e94-33827628c742-kube-api-access-h976n\") pod \"horizon-668d8db6fc-jzr8v\" (UID: \"7b4dc449-172f-46cf-9e94-33827628c742\") " pod="openstack/horizon-668d8db6fc-jzr8v" Nov 24 01:28:31 crc kubenswrapper[4755]: I1124 01:28:31.164224 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7b4dc449-172f-46cf-9e94-33827628c742-config-data\") pod \"horizon-668d8db6fc-jzr8v\" (UID: \"7b4dc449-172f-46cf-9e94-33827628c742\") " pod="openstack/horizon-668d8db6fc-jzr8v" Nov 24 01:28:31 crc kubenswrapper[4755]: I1124 01:28:31.164635 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b4dc449-172f-46cf-9e94-33827628c742-logs\") pod \"horizon-668d8db6fc-jzr8v\" (UID: \"7b4dc449-172f-46cf-9e94-33827628c742\") " pod="openstack/horizon-668d8db6fc-jzr8v" Nov 24 01:28:31 crc kubenswrapper[4755]: I1124 01:28:31.165154 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7b4dc449-172f-46cf-9e94-33827628c742-scripts\") pod \"horizon-668d8db6fc-jzr8v\" (UID: \"7b4dc449-172f-46cf-9e94-33827628c742\") " pod="openstack/horizon-668d8db6fc-jzr8v" Nov 24 01:28:31 crc kubenswrapper[4755]: I1124 01:28:31.174826 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"7e9ef39d-434b-43f6-9a27-2635b2e93775","Type":"ContainerStarted","Data":"62ca5d51cdfa25a4f6aaf1d804ce7c873bc15f261833023f36423b1a6871e435"} Nov 24 01:28:31 crc kubenswrapper[4755]: I1124 01:28:31.175949 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/7b4dc449-172f-46cf-9e94-33827628c742-horizon-secret-key\") pod \"horizon-668d8db6fc-jzr8v\" (UID: \"7b4dc449-172f-46cf-9e94-33827628c742\") " pod="openstack/horizon-668d8db6fc-jzr8v" Nov 24 01:28:31 crc kubenswrapper[4755]: I1124 01:28:31.193933 4755 generic.go:334] "Generic (PLEG): container finished" podID="19be7f69-ce3a-4c28-8934-885d997016ff" containerID="36a2644c35e228b5d798e51b6eba96035be4caddb427027902c5a10828b5e7fd" exitCode=0 Nov 24 01:28:31 crc kubenswrapper[4755]: I1124 01:28:31.194670 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-27drm" 
event={"ID":"19be7f69-ce3a-4c28-8934-885d997016ff","Type":"ContainerDied","Data":"36a2644c35e228b5d798e51b6eba96035be4caddb427027902c5a10828b5e7fd"} Nov 24 01:28:31 crc kubenswrapper[4755]: I1124 01:28:31.196874 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h976n\" (UniqueName: \"kubernetes.io/projected/7b4dc449-172f-46cf-9e94-33827628c742-kube-api-access-h976n\") pod \"horizon-668d8db6fc-jzr8v\" (UID: \"7b4dc449-172f-46cf-9e94-33827628c742\") " pod="openstack/horizon-668d8db6fc-jzr8v" Nov 24 01:28:31 crc kubenswrapper[4755]: I1124 01:28:31.324459 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-c6z97"] Nov 24 01:28:31 crc kubenswrapper[4755]: I1124 01:28:31.346479 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-c6z97"] Nov 24 01:28:31 crc kubenswrapper[4755]: I1124 01:28:31.393749 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-668d8db6fc-jzr8v" Nov 24 01:28:31 crc kubenswrapper[4755]: I1124 01:28:31.935986 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-668d8db6fc-jzr8v"] Nov 24 01:28:31 crc kubenswrapper[4755]: W1124 01:28:31.971826 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7b4dc449_172f_46cf_9e94_33827628c742.slice/crio-dea93c2bd2d9c4f2b6e96e523f13faa6759543d4764001c36dd63226858a3469 WatchSource:0}: Error finding container dea93c2bd2d9c4f2b6e96e523f13faa6759543d4764001c36dd63226858a3469: Status 404 returned error can't find the container with id dea93c2bd2d9c4f2b6e96e523f13faa6759543d4764001c36dd63226858a3469 Nov 24 01:28:32 crc kubenswrapper[4755]: I1124 01:28:32.016724 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="261fa528-26f9-4f97-93ba-dd0f4a7e6260" path="/var/lib/kubelet/pods/261fa528-26f9-4f97-93ba-dd0f4a7e6260/volumes" Nov 24 01:28:32 crc kubenswrapper[4755]: I1124 01:28:32.017831 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84a99ebb-2bb7-4562-87b2-e1f5467b51dc" path="/var/lib/kubelet/pods/84a99ebb-2bb7-4562-87b2-e1f5467b51dc/volumes" Nov 24 01:28:32 crc kubenswrapper[4755]: I1124 01:28:32.225071 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5a367868-ca1d-48df-93a9-c8104ca535d9","Type":"ContainerStarted","Data":"00bebbf0d88f760673b1f2474ebf20e581245e288dda8d9b826bac87c1d5dc0b"} Nov 24 01:28:32 crc kubenswrapper[4755]: I1124 01:28:32.229742 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"7e9ef39d-434b-43f6-9a27-2635b2e93775","Type":"ContainerStarted","Data":"517bae0a34d2fb10068c1debf56f959650efc3d5ea96b40fb33be7a8f4550371"} Nov 24 01:28:32 crc kubenswrapper[4755]: I1124 01:28:32.240756 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-27drm" event={"ID":"19be7f69-ce3a-4c28-8934-885d997016ff","Type":"ContainerStarted","Data":"1721d87ddbf2367aba3c08cef98ee96b7c9b53345ff588cf3321efb323d739b6"} Nov 24 01:28:32 crc kubenswrapper[4755]: I1124 01:28:32.241389 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8b5c85b87-27drm" Nov 24 01:28:32 crc kubenswrapper[4755]: I1124 01:28:32.262732 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-668d8db6fc-jzr8v" 
event={"ID":"7b4dc449-172f-46cf-9e94-33827628c742","Type":"ContainerStarted","Data":"dea93c2bd2d9c4f2b6e96e523f13faa6759543d4764001c36dd63226858a3469"} Nov 24 01:28:32 crc kubenswrapper[4755]: I1124 01:28:32.272259 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8b5c85b87-27drm" podStartSLOduration=4.272243017 podStartE2EDuration="4.272243017s" podCreationTimestamp="2025-11-24 01:28:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:28:32.268416909 +0000 UTC m=+936.954482420" watchObservedRunningTime="2025-11-24 01:28:32.272243017 +0000 UTC m=+936.958308518" Nov 24 01:28:33 crc kubenswrapper[4755]: I1124 01:28:33.278525 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5a367868-ca1d-48df-93a9-c8104ca535d9","Type":"ContainerStarted","Data":"3d520c3cad01ee8f4e153fbea2e9442ad72b0787ffda3bf12bad71de940b3c47"} Nov 24 01:28:33 crc kubenswrapper[4755]: I1124 01:28:33.278637 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="5a367868-ca1d-48df-93a9-c8104ca535d9" containerName="glance-log" containerID="cri-o://00bebbf0d88f760673b1f2474ebf20e581245e288dda8d9b826bac87c1d5dc0b" gracePeriod=30 Nov 24 01:28:33 crc kubenswrapper[4755]: I1124 01:28:33.278731 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="5a367868-ca1d-48df-93a9-c8104ca535d9" containerName="glance-httpd" containerID="cri-o://3d520c3cad01ee8f4e153fbea2e9442ad72b0787ffda3bf12bad71de940b3c47" gracePeriod=30 Nov 24 01:28:33 crc kubenswrapper[4755]: I1124 01:28:33.285735 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"7e9ef39d-434b-43f6-9a27-2635b2e93775","Type":"ContainerStarted","Data":"a2bc8ade0a949c897b8a2530874abd9ae603314a73e8d00230635749e0da94d4"} Nov 24 01:28:33 crc kubenswrapper[4755]: I1124 01:28:33.285751 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="7e9ef39d-434b-43f6-9a27-2635b2e93775" containerName="glance-log" containerID="cri-o://517bae0a34d2fb10068c1debf56f959650efc3d5ea96b40fb33be7a8f4550371" gracePeriod=30 Nov 24 01:28:33 crc kubenswrapper[4755]: I1124 01:28:33.285855 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="7e9ef39d-434b-43f6-9a27-2635b2e93775" containerName="glance-httpd" containerID="cri-o://a2bc8ade0a949c897b8a2530874abd9ae603314a73e8d00230635749e0da94d4" gracePeriod=30 Nov 24 01:28:33 crc kubenswrapper[4755]: I1124 01:28:33.294699 4755 patch_prober.go:28] interesting pod/machine-config-daemon-h8xzm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 01:28:33 crc kubenswrapper[4755]: I1124 01:28:33.294746 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 01:28:33 
crc kubenswrapper[4755]: I1124 01:28:33.314830 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=5.314808724 podStartE2EDuration="5.314808724s" podCreationTimestamp="2025-11-24 01:28:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:28:33.306022866 +0000 UTC m=+937.992088357" watchObservedRunningTime="2025-11-24 01:28:33.314808724 +0000 UTC m=+938.000874225" Nov 24 01:28:33 crc kubenswrapper[4755]: I1124 01:28:33.339922 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=5.33990262 podStartE2EDuration="5.33990262s" podCreationTimestamp="2025-11-24 01:28:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:28:33.337861332 +0000 UTC m=+938.023926833" watchObservedRunningTime="2025-11-24 01:28:33.33990262 +0000 UTC m=+938.025968121" Nov 24 01:28:34 crc kubenswrapper[4755]: I1124 01:28:34.298035 4755 generic.go:334] "Generic (PLEG): container finished" podID="7e9ef39d-434b-43f6-9a27-2635b2e93775" containerID="a2bc8ade0a949c897b8a2530874abd9ae603314a73e8d00230635749e0da94d4" exitCode=0 Nov 24 01:28:34 crc kubenswrapper[4755]: I1124 01:28:34.298068 4755 generic.go:334] "Generic (PLEG): container finished" podID="7e9ef39d-434b-43f6-9a27-2635b2e93775" containerID="517bae0a34d2fb10068c1debf56f959650efc3d5ea96b40fb33be7a8f4550371" exitCode=143 Nov 24 01:28:34 crc kubenswrapper[4755]: I1124 01:28:34.298118 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"7e9ef39d-434b-43f6-9a27-2635b2e93775","Type":"ContainerDied","Data":"a2bc8ade0a949c897b8a2530874abd9ae603314a73e8d00230635749e0da94d4"} Nov 24 01:28:34 crc kubenswrapper[4755]: I1124 01:28:34.298161 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"7e9ef39d-434b-43f6-9a27-2635b2e93775","Type":"ContainerDied","Data":"517bae0a34d2fb10068c1debf56f959650efc3d5ea96b40fb33be7a8f4550371"} Nov 24 01:28:34 crc kubenswrapper[4755]: I1124 01:28:34.300727 4755 generic.go:334] "Generic (PLEG): container finished" podID="6354cb83-933d-4b3f-b922-ce9e4f94e123" containerID="8148e6250c00a97e6011644b1457a767cd2a682ac0406bba9dbb0da688fde8ae" exitCode=0 Nov 24 01:28:34 crc kubenswrapper[4755]: I1124 01:28:34.300801 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-mfpvq" event={"ID":"6354cb83-933d-4b3f-b922-ce9e4f94e123","Type":"ContainerDied","Data":"8148e6250c00a97e6011644b1457a767cd2a682ac0406bba9dbb0da688fde8ae"} Nov 24 01:28:34 crc kubenswrapper[4755]: I1124 01:28:34.305836 4755 generic.go:334] "Generic (PLEG): container finished" podID="5a367868-ca1d-48df-93a9-c8104ca535d9" containerID="3d520c3cad01ee8f4e153fbea2e9442ad72b0787ffda3bf12bad71de940b3c47" exitCode=0 Nov 24 01:28:34 crc kubenswrapper[4755]: I1124 01:28:34.305857 4755 generic.go:334] "Generic (PLEG): container finished" podID="5a367868-ca1d-48df-93a9-c8104ca535d9" containerID="00bebbf0d88f760673b1f2474ebf20e581245e288dda8d9b826bac87c1d5dc0b" exitCode=143 Nov 24 01:28:34 crc kubenswrapper[4755]: I1124 01:28:34.305879 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" 
event={"ID":"5a367868-ca1d-48df-93a9-c8104ca535d9","Type":"ContainerDied","Data":"3d520c3cad01ee8f4e153fbea2e9442ad72b0787ffda3bf12bad71de940b3c47"} Nov 24 01:28:34 crc kubenswrapper[4755]: I1124 01:28:34.305905 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5a367868-ca1d-48df-93a9-c8104ca535d9","Type":"ContainerDied","Data":"00bebbf0d88f760673b1f2474ebf20e581245e288dda8d9b826bac87c1d5dc0b"} Nov 24 01:28:37 crc kubenswrapper[4755]: I1124 01:28:37.870635 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6d767c7b5c-n5rr5"] Nov 24 01:28:37 crc kubenswrapper[4755]: I1124 01:28:37.931785 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-55cf755d8-2cns2"] Nov 24 01:28:37 crc kubenswrapper[4755]: I1124 01:28:37.933796 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-55cf755d8-2cns2" Nov 24 01:28:37 crc kubenswrapper[4755]: I1124 01:28:37.936520 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Nov 24 01:28:37 crc kubenswrapper[4755]: I1124 01:28:37.957128 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-55cf755d8-2cns2"] Nov 24 01:28:37 crc kubenswrapper[4755]: I1124 01:28:37.993924 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lnrmt\" (UniqueName: \"kubernetes.io/projected/dc7447cb-d4c3-48d1-8cd3-065d5eee21cb-kube-api-access-lnrmt\") pod \"horizon-55cf755d8-2cns2\" (UID: \"dc7447cb-d4c3-48d1-8cd3-065d5eee21cb\") " pod="openstack/horizon-55cf755d8-2cns2" Nov 24 01:28:37 crc kubenswrapper[4755]: I1124 01:28:37.994136 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc7447cb-d4c3-48d1-8cd3-065d5eee21cb-logs\") pod \"horizon-55cf755d8-2cns2\" (UID: \"dc7447cb-d4c3-48d1-8cd3-065d5eee21cb\") " pod="openstack/horizon-55cf755d8-2cns2" Nov 24 01:28:37 crc kubenswrapper[4755]: I1124 01:28:37.994201 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/dc7447cb-d4c3-48d1-8cd3-065d5eee21cb-config-data\") pod \"horizon-55cf755d8-2cns2\" (UID: \"dc7447cb-d4c3-48d1-8cd3-065d5eee21cb\") " pod="openstack/horizon-55cf755d8-2cns2" Nov 24 01:28:37 crc kubenswrapper[4755]: I1124 01:28:37.994255 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/dc7447cb-d4c3-48d1-8cd3-065d5eee21cb-horizon-secret-key\") pod \"horizon-55cf755d8-2cns2\" (UID: \"dc7447cb-d4c3-48d1-8cd3-065d5eee21cb\") " pod="openstack/horizon-55cf755d8-2cns2" Nov 24 01:28:37 crc kubenswrapper[4755]: I1124 01:28:37.994280 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc7447cb-d4c3-48d1-8cd3-065d5eee21cb-combined-ca-bundle\") pod \"horizon-55cf755d8-2cns2\" (UID: \"dc7447cb-d4c3-48d1-8cd3-065d5eee21cb\") " pod="openstack/horizon-55cf755d8-2cns2" Nov 24 01:28:37 crc kubenswrapper[4755]: I1124 01:28:37.994329 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/dc7447cb-d4c3-48d1-8cd3-065d5eee21cb-scripts\") pod 
\"horizon-55cf755d8-2cns2\" (UID: \"dc7447cb-d4c3-48d1-8cd3-065d5eee21cb\") " pod="openstack/horizon-55cf755d8-2cns2" Nov 24 01:28:37 crc kubenswrapper[4755]: I1124 01:28:37.994573 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc7447cb-d4c3-48d1-8cd3-065d5eee21cb-horizon-tls-certs\") pod \"horizon-55cf755d8-2cns2\" (UID: \"dc7447cb-d4c3-48d1-8cd3-065d5eee21cb\") " pod="openstack/horizon-55cf755d8-2cns2" Nov 24 01:28:38 crc kubenswrapper[4755]: I1124 01:28:38.017145 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-668d8db6fc-jzr8v"] Nov 24 01:28:38 crc kubenswrapper[4755]: I1124 01:28:38.034650 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-75d8fb7cd4-vbxkn"] Nov 24 01:28:38 crc kubenswrapper[4755]: I1124 01:28:38.036408 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-75d8fb7cd4-vbxkn" Nov 24 01:28:38 crc kubenswrapper[4755]: I1124 01:28:38.052943 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-75d8fb7cd4-vbxkn"] Nov 24 01:28:38 crc kubenswrapper[4755]: I1124 01:28:38.095547 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d176bdd-fe2f-4ed0-a930-2a6ae568b400-combined-ca-bundle\") pod \"horizon-75d8fb7cd4-vbxkn\" (UID: \"5d176bdd-fe2f-4ed0-a930-2a6ae568b400\") " pod="openstack/horizon-75d8fb7cd4-vbxkn" Nov 24 01:28:38 crc kubenswrapper[4755]: I1124 01:28:38.095819 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9npzz\" (UniqueName: \"kubernetes.io/projected/5d176bdd-fe2f-4ed0-a930-2a6ae568b400-kube-api-access-9npzz\") pod \"horizon-75d8fb7cd4-vbxkn\" (UID: \"5d176bdd-fe2f-4ed0-a930-2a6ae568b400\") " pod="openstack/horizon-75d8fb7cd4-vbxkn" Nov 24 01:28:38 crc kubenswrapper[4755]: I1124 01:28:38.095952 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc7447cb-d4c3-48d1-8cd3-065d5eee21cb-horizon-tls-certs\") pod \"horizon-55cf755d8-2cns2\" (UID: \"dc7447cb-d4c3-48d1-8cd3-065d5eee21cb\") " pod="openstack/horizon-55cf755d8-2cns2" Nov 24 01:28:38 crc kubenswrapper[4755]: I1124 01:28:38.096040 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5d176bdd-fe2f-4ed0-a930-2a6ae568b400-config-data\") pod \"horizon-75d8fb7cd4-vbxkn\" (UID: \"5d176bdd-fe2f-4ed0-a930-2a6ae568b400\") " pod="openstack/horizon-75d8fb7cd4-vbxkn" Nov 24 01:28:38 crc kubenswrapper[4755]: I1124 01:28:38.096168 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lnrmt\" (UniqueName: \"kubernetes.io/projected/dc7447cb-d4c3-48d1-8cd3-065d5eee21cb-kube-api-access-lnrmt\") pod \"horizon-55cf755d8-2cns2\" (UID: \"dc7447cb-d4c3-48d1-8cd3-065d5eee21cb\") " pod="openstack/horizon-55cf755d8-2cns2" Nov 24 01:28:38 crc kubenswrapper[4755]: I1124 01:28:38.096244 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc7447cb-d4c3-48d1-8cd3-065d5eee21cb-logs\") pod \"horizon-55cf755d8-2cns2\" (UID: \"dc7447cb-d4c3-48d1-8cd3-065d5eee21cb\") " pod="openstack/horizon-55cf755d8-2cns2" Nov 24 01:28:38 crc 
kubenswrapper[4755]: I1124 01:28:38.096353 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5d176bdd-fe2f-4ed0-a930-2a6ae568b400-horizon-secret-key\") pod \"horizon-75d8fb7cd4-vbxkn\" (UID: \"5d176bdd-fe2f-4ed0-a930-2a6ae568b400\") " pod="openstack/horizon-75d8fb7cd4-vbxkn" Nov 24 01:28:38 crc kubenswrapper[4755]: I1124 01:28:38.096447 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/dc7447cb-d4c3-48d1-8cd3-065d5eee21cb-config-data\") pod \"horizon-55cf755d8-2cns2\" (UID: \"dc7447cb-d4c3-48d1-8cd3-065d5eee21cb\") " pod="openstack/horizon-55cf755d8-2cns2" Nov 24 01:28:38 crc kubenswrapper[4755]: I1124 01:28:38.096562 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d176bdd-fe2f-4ed0-a930-2a6ae568b400-horizon-tls-certs\") pod \"horizon-75d8fb7cd4-vbxkn\" (UID: \"5d176bdd-fe2f-4ed0-a930-2a6ae568b400\") " pod="openstack/horizon-75d8fb7cd4-vbxkn" Nov 24 01:28:38 crc kubenswrapper[4755]: I1124 01:28:38.096720 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5d176bdd-fe2f-4ed0-a930-2a6ae568b400-scripts\") pod \"horizon-75d8fb7cd4-vbxkn\" (UID: \"5d176bdd-fe2f-4ed0-a930-2a6ae568b400\") " pod="openstack/horizon-75d8fb7cd4-vbxkn" Nov 24 01:28:38 crc kubenswrapper[4755]: I1124 01:28:38.096850 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc7447cb-d4c3-48d1-8cd3-065d5eee21cb-combined-ca-bundle\") pod \"horizon-55cf755d8-2cns2\" (UID: \"dc7447cb-d4c3-48d1-8cd3-065d5eee21cb\") " pod="openstack/horizon-55cf755d8-2cns2" Nov 24 01:28:38 crc kubenswrapper[4755]: I1124 01:28:38.096961 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/dc7447cb-d4c3-48d1-8cd3-065d5eee21cb-horizon-secret-key\") pod \"horizon-55cf755d8-2cns2\" (UID: \"dc7447cb-d4c3-48d1-8cd3-065d5eee21cb\") " pod="openstack/horizon-55cf755d8-2cns2" Nov 24 01:28:38 crc kubenswrapper[4755]: I1124 01:28:38.097092 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/dc7447cb-d4c3-48d1-8cd3-065d5eee21cb-scripts\") pod \"horizon-55cf755d8-2cns2\" (UID: \"dc7447cb-d4c3-48d1-8cd3-065d5eee21cb\") " pod="openstack/horizon-55cf755d8-2cns2" Nov 24 01:28:38 crc kubenswrapper[4755]: I1124 01:28:38.097175 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5d176bdd-fe2f-4ed0-a930-2a6ae568b400-logs\") pod \"horizon-75d8fb7cd4-vbxkn\" (UID: \"5d176bdd-fe2f-4ed0-a930-2a6ae568b400\") " pod="openstack/horizon-75d8fb7cd4-vbxkn" Nov 24 01:28:38 crc kubenswrapper[4755]: I1124 01:28:38.098759 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc7447cb-d4c3-48d1-8cd3-065d5eee21cb-logs\") pod \"horizon-55cf755d8-2cns2\" (UID: \"dc7447cb-d4c3-48d1-8cd3-065d5eee21cb\") " pod="openstack/horizon-55cf755d8-2cns2" Nov 24 01:28:38 crc kubenswrapper[4755]: I1124 01:28:38.100909 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config-data\" (UniqueName: \"kubernetes.io/configmap/dc7447cb-d4c3-48d1-8cd3-065d5eee21cb-config-data\") pod \"horizon-55cf755d8-2cns2\" (UID: \"dc7447cb-d4c3-48d1-8cd3-065d5eee21cb\") " pod="openstack/horizon-55cf755d8-2cns2" Nov 24 01:28:38 crc kubenswrapper[4755]: I1124 01:28:38.102255 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/dc7447cb-d4c3-48d1-8cd3-065d5eee21cb-scripts\") pod \"horizon-55cf755d8-2cns2\" (UID: \"dc7447cb-d4c3-48d1-8cd3-065d5eee21cb\") " pod="openstack/horizon-55cf755d8-2cns2" Nov 24 01:28:38 crc kubenswrapper[4755]: I1124 01:28:38.105181 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/dc7447cb-d4c3-48d1-8cd3-065d5eee21cb-horizon-secret-key\") pod \"horizon-55cf755d8-2cns2\" (UID: \"dc7447cb-d4c3-48d1-8cd3-065d5eee21cb\") " pod="openstack/horizon-55cf755d8-2cns2" Nov 24 01:28:38 crc kubenswrapper[4755]: I1124 01:28:38.105200 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc7447cb-d4c3-48d1-8cd3-065d5eee21cb-horizon-tls-certs\") pod \"horizon-55cf755d8-2cns2\" (UID: \"dc7447cb-d4c3-48d1-8cd3-065d5eee21cb\") " pod="openstack/horizon-55cf755d8-2cns2" Nov 24 01:28:38 crc kubenswrapper[4755]: I1124 01:28:38.105385 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc7447cb-d4c3-48d1-8cd3-065d5eee21cb-combined-ca-bundle\") pod \"horizon-55cf755d8-2cns2\" (UID: \"dc7447cb-d4c3-48d1-8cd3-065d5eee21cb\") " pod="openstack/horizon-55cf755d8-2cns2" Nov 24 01:28:38 crc kubenswrapper[4755]: I1124 01:28:38.119782 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lnrmt\" (UniqueName: \"kubernetes.io/projected/dc7447cb-d4c3-48d1-8cd3-065d5eee21cb-kube-api-access-lnrmt\") pod \"horizon-55cf755d8-2cns2\" (UID: \"dc7447cb-d4c3-48d1-8cd3-065d5eee21cb\") " pod="openstack/horizon-55cf755d8-2cns2" Nov 24 01:28:38 crc kubenswrapper[4755]: I1124 01:28:38.198959 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5d176bdd-fe2f-4ed0-a930-2a6ae568b400-config-data\") pod \"horizon-75d8fb7cd4-vbxkn\" (UID: \"5d176bdd-fe2f-4ed0-a930-2a6ae568b400\") " pod="openstack/horizon-75d8fb7cd4-vbxkn" Nov 24 01:28:38 crc kubenswrapper[4755]: I1124 01:28:38.199233 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5d176bdd-fe2f-4ed0-a930-2a6ae568b400-horizon-secret-key\") pod \"horizon-75d8fb7cd4-vbxkn\" (UID: \"5d176bdd-fe2f-4ed0-a930-2a6ae568b400\") " pod="openstack/horizon-75d8fb7cd4-vbxkn" Nov 24 01:28:38 crc kubenswrapper[4755]: I1124 01:28:38.199264 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d176bdd-fe2f-4ed0-a930-2a6ae568b400-horizon-tls-certs\") pod \"horizon-75d8fb7cd4-vbxkn\" (UID: \"5d176bdd-fe2f-4ed0-a930-2a6ae568b400\") " pod="openstack/horizon-75d8fb7cd4-vbxkn" Nov 24 01:28:38 crc kubenswrapper[4755]: I1124 01:28:38.199285 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5d176bdd-fe2f-4ed0-a930-2a6ae568b400-scripts\") pod \"horizon-75d8fb7cd4-vbxkn\" (UID: 
\"5d176bdd-fe2f-4ed0-a930-2a6ae568b400\") " pod="openstack/horizon-75d8fb7cd4-vbxkn" Nov 24 01:28:38 crc kubenswrapper[4755]: I1124 01:28:38.199333 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5d176bdd-fe2f-4ed0-a930-2a6ae568b400-logs\") pod \"horizon-75d8fb7cd4-vbxkn\" (UID: \"5d176bdd-fe2f-4ed0-a930-2a6ae568b400\") " pod="openstack/horizon-75d8fb7cd4-vbxkn" Nov 24 01:28:38 crc kubenswrapper[4755]: I1124 01:28:38.199349 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d176bdd-fe2f-4ed0-a930-2a6ae568b400-combined-ca-bundle\") pod \"horizon-75d8fb7cd4-vbxkn\" (UID: \"5d176bdd-fe2f-4ed0-a930-2a6ae568b400\") " pod="openstack/horizon-75d8fb7cd4-vbxkn" Nov 24 01:28:38 crc kubenswrapper[4755]: I1124 01:28:38.199365 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9npzz\" (UniqueName: \"kubernetes.io/projected/5d176bdd-fe2f-4ed0-a930-2a6ae568b400-kube-api-access-9npzz\") pod \"horizon-75d8fb7cd4-vbxkn\" (UID: \"5d176bdd-fe2f-4ed0-a930-2a6ae568b400\") " pod="openstack/horizon-75d8fb7cd4-vbxkn" Nov 24 01:28:38 crc kubenswrapper[4755]: I1124 01:28:38.200646 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5d176bdd-fe2f-4ed0-a930-2a6ae568b400-config-data\") pod \"horizon-75d8fb7cd4-vbxkn\" (UID: \"5d176bdd-fe2f-4ed0-a930-2a6ae568b400\") " pod="openstack/horizon-75d8fb7cd4-vbxkn" Nov 24 01:28:38 crc kubenswrapper[4755]: I1124 01:28:38.202254 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5d176bdd-fe2f-4ed0-a930-2a6ae568b400-scripts\") pod \"horizon-75d8fb7cd4-vbxkn\" (UID: \"5d176bdd-fe2f-4ed0-a930-2a6ae568b400\") " pod="openstack/horizon-75d8fb7cd4-vbxkn" Nov 24 01:28:38 crc kubenswrapper[4755]: I1124 01:28:38.202801 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5d176bdd-fe2f-4ed0-a930-2a6ae568b400-logs\") pod \"horizon-75d8fb7cd4-vbxkn\" (UID: \"5d176bdd-fe2f-4ed0-a930-2a6ae568b400\") " pod="openstack/horizon-75d8fb7cd4-vbxkn" Nov 24 01:28:38 crc kubenswrapper[4755]: I1124 01:28:38.204992 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d176bdd-fe2f-4ed0-a930-2a6ae568b400-horizon-tls-certs\") pod \"horizon-75d8fb7cd4-vbxkn\" (UID: \"5d176bdd-fe2f-4ed0-a930-2a6ae568b400\") " pod="openstack/horizon-75d8fb7cd4-vbxkn" Nov 24 01:28:38 crc kubenswrapper[4755]: I1124 01:28:38.215183 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5d176bdd-fe2f-4ed0-a930-2a6ae568b400-horizon-secret-key\") pod \"horizon-75d8fb7cd4-vbxkn\" (UID: \"5d176bdd-fe2f-4ed0-a930-2a6ae568b400\") " pod="openstack/horizon-75d8fb7cd4-vbxkn" Nov 24 01:28:38 crc kubenswrapper[4755]: I1124 01:28:38.215423 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d176bdd-fe2f-4ed0-a930-2a6ae568b400-combined-ca-bundle\") pod \"horizon-75d8fb7cd4-vbxkn\" (UID: \"5d176bdd-fe2f-4ed0-a930-2a6ae568b400\") " pod="openstack/horizon-75d8fb7cd4-vbxkn" Nov 24 01:28:38 crc kubenswrapper[4755]: I1124 01:28:38.221462 4755 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-9npzz\" (UniqueName: \"kubernetes.io/projected/5d176bdd-fe2f-4ed0-a930-2a6ae568b400-kube-api-access-9npzz\") pod \"horizon-75d8fb7cd4-vbxkn\" (UID: \"5d176bdd-fe2f-4ed0-a930-2a6ae568b400\") " pod="openstack/horizon-75d8fb7cd4-vbxkn" Nov 24 01:28:38 crc kubenswrapper[4755]: I1124 01:28:38.257345 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-55cf755d8-2cns2" Nov 24 01:28:38 crc kubenswrapper[4755]: I1124 01:28:38.355634 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-75d8fb7cd4-vbxkn" Nov 24 01:28:39 crc kubenswrapper[4755]: I1124 01:28:39.036782 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8b5c85b87-27drm" Nov 24 01:28:39 crc kubenswrapper[4755]: I1124 01:28:39.084798 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-x9ksn"] Nov 24 01:28:39 crc kubenswrapper[4755]: I1124 01:28:39.085372 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-77585f5f8c-x9ksn" podUID="46a31b0c-c17e-469f-823e-f56504308b2c" containerName="dnsmasq-dns" containerID="cri-o://29634fe605bd85d02d49022c7d12a34c46e0e4db749363cdbab033f911221084" gracePeriod=10 Nov 24 01:28:39 crc kubenswrapper[4755]: I1124 01:28:39.364361 4755 generic.go:334] "Generic (PLEG): container finished" podID="46a31b0c-c17e-469f-823e-f56504308b2c" containerID="29634fe605bd85d02d49022c7d12a34c46e0e4db749363cdbab033f911221084" exitCode=0 Nov 24 01:28:39 crc kubenswrapper[4755]: I1124 01:28:39.364652 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-x9ksn" event={"ID":"46a31b0c-c17e-469f-823e-f56504308b2c","Type":"ContainerDied","Data":"29634fe605bd85d02d49022c7d12a34c46e0e4db749363cdbab033f911221084"} Nov 24 01:28:40 crc kubenswrapper[4755]: I1124 01:28:40.114810 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-77585f5f8c-x9ksn" podUID="46a31b0c-c17e-469f-823e-f56504308b2c" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.125:5353: connect: connection refused" Nov 24 01:28:42 crc kubenswrapper[4755]: I1124 01:28:42.578769 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 24 01:28:42 crc kubenswrapper[4755]: I1124 01:28:42.678686 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a367868-ca1d-48df-93a9-c8104ca535d9-logs\") pod \"5a367868-ca1d-48df-93a9-c8104ca535d9\" (UID: \"5a367868-ca1d-48df-93a9-c8104ca535d9\") " Nov 24 01:28:42 crc kubenswrapper[4755]: I1124 01:28:42.678757 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sf5ns\" (UniqueName: \"kubernetes.io/projected/5a367868-ca1d-48df-93a9-c8104ca535d9-kube-api-access-sf5ns\") pod \"5a367868-ca1d-48df-93a9-c8104ca535d9\" (UID: \"5a367868-ca1d-48df-93a9-c8104ca535d9\") " Nov 24 01:28:42 crc kubenswrapper[4755]: I1124 01:28:42.678799 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a367868-ca1d-48df-93a9-c8104ca535d9-public-tls-certs\") pod \"5a367868-ca1d-48df-93a9-c8104ca535d9\" (UID: \"5a367868-ca1d-48df-93a9-c8104ca535d9\") " Nov 24 01:28:42 crc kubenswrapper[4755]: I1124 01:28:42.678876 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a367868-ca1d-48df-93a9-c8104ca535d9-combined-ca-bundle\") pod \"5a367868-ca1d-48df-93a9-c8104ca535d9\" (UID: \"5a367868-ca1d-48df-93a9-c8104ca535d9\") " Nov 24 01:28:42 crc kubenswrapper[4755]: I1124 01:28:42.678899 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"5a367868-ca1d-48df-93a9-c8104ca535d9\" (UID: \"5a367868-ca1d-48df-93a9-c8104ca535d9\") " Nov 24 01:28:42 crc kubenswrapper[4755]: I1124 01:28:42.678968 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5a367868-ca1d-48df-93a9-c8104ca535d9-scripts\") pod \"5a367868-ca1d-48df-93a9-c8104ca535d9\" (UID: \"5a367868-ca1d-48df-93a9-c8104ca535d9\") " Nov 24 01:28:42 crc kubenswrapper[4755]: I1124 01:28:42.679003 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a367868-ca1d-48df-93a9-c8104ca535d9-config-data\") pod \"5a367868-ca1d-48df-93a9-c8104ca535d9\" (UID: \"5a367868-ca1d-48df-93a9-c8104ca535d9\") " Nov 24 01:28:42 crc kubenswrapper[4755]: I1124 01:28:42.679081 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5a367868-ca1d-48df-93a9-c8104ca535d9-httpd-run\") pod \"5a367868-ca1d-48df-93a9-c8104ca535d9\" (UID: \"5a367868-ca1d-48df-93a9-c8104ca535d9\") " Nov 24 01:28:42 crc kubenswrapper[4755]: I1124 01:28:42.679225 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5a367868-ca1d-48df-93a9-c8104ca535d9-logs" (OuterVolumeSpecName: "logs") pod "5a367868-ca1d-48df-93a9-c8104ca535d9" (UID: "5a367868-ca1d-48df-93a9-c8104ca535d9"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:28:42 crc kubenswrapper[4755]: I1124 01:28:42.679490 4755 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a367868-ca1d-48df-93a9-c8104ca535d9-logs\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:42 crc kubenswrapper[4755]: I1124 01:28:42.679867 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5a367868-ca1d-48df-93a9-c8104ca535d9-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "5a367868-ca1d-48df-93a9-c8104ca535d9" (UID: "5a367868-ca1d-48df-93a9-c8104ca535d9"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:28:42 crc kubenswrapper[4755]: I1124 01:28:42.687699 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "glance") pod "5a367868-ca1d-48df-93a9-c8104ca535d9" (UID: "5a367868-ca1d-48df-93a9-c8104ca535d9"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 24 01:28:42 crc kubenswrapper[4755]: I1124 01:28:42.688166 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a367868-ca1d-48df-93a9-c8104ca535d9-scripts" (OuterVolumeSpecName: "scripts") pod "5a367868-ca1d-48df-93a9-c8104ca535d9" (UID: "5a367868-ca1d-48df-93a9-c8104ca535d9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:28:42 crc kubenswrapper[4755]: I1124 01:28:42.690243 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a367868-ca1d-48df-93a9-c8104ca535d9-kube-api-access-sf5ns" (OuterVolumeSpecName: "kube-api-access-sf5ns") pod "5a367868-ca1d-48df-93a9-c8104ca535d9" (UID: "5a367868-ca1d-48df-93a9-c8104ca535d9"). InnerVolumeSpecName "kube-api-access-sf5ns". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:28:42 crc kubenswrapper[4755]: I1124 01:28:42.715774 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a367868-ca1d-48df-93a9-c8104ca535d9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5a367868-ca1d-48df-93a9-c8104ca535d9" (UID: "5a367868-ca1d-48df-93a9-c8104ca535d9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:28:42 crc kubenswrapper[4755]: I1124 01:28:42.730497 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a367868-ca1d-48df-93a9-c8104ca535d9-config-data" (OuterVolumeSpecName: "config-data") pod "5a367868-ca1d-48df-93a9-c8104ca535d9" (UID: "5a367868-ca1d-48df-93a9-c8104ca535d9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:28:42 crc kubenswrapper[4755]: I1124 01:28:42.740003 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a367868-ca1d-48df-93a9-c8104ca535d9-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "5a367868-ca1d-48df-93a9-c8104ca535d9" (UID: "5a367868-ca1d-48df-93a9-c8104ca535d9"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:28:42 crc kubenswrapper[4755]: I1124 01:28:42.781326 4755 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5a367868-ca1d-48df-93a9-c8104ca535d9-httpd-run\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:42 crc kubenswrapper[4755]: I1124 01:28:42.781364 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sf5ns\" (UniqueName: \"kubernetes.io/projected/5a367868-ca1d-48df-93a9-c8104ca535d9-kube-api-access-sf5ns\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:42 crc kubenswrapper[4755]: I1124 01:28:42.781378 4755 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a367868-ca1d-48df-93a9-c8104ca535d9-public-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:42 crc kubenswrapper[4755]: I1124 01:28:42.781416 4755 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Nov 24 01:28:42 crc kubenswrapper[4755]: I1124 01:28:42.781427 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a367868-ca1d-48df-93a9-c8104ca535d9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:42 crc kubenswrapper[4755]: I1124 01:28:42.781438 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5a367868-ca1d-48df-93a9-c8104ca535d9-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:42 crc kubenswrapper[4755]: I1124 01:28:42.781447 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a367868-ca1d-48df-93a9-c8104ca535d9-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:42 crc kubenswrapper[4755]: I1124 01:28:42.802995 4755 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Nov 24 01:28:42 crc kubenswrapper[4755]: I1124 01:28:42.883168 4755 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:43 crc kubenswrapper[4755]: I1124 01:28:43.406442 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5a367868-ca1d-48df-93a9-c8104ca535d9","Type":"ContainerDied","Data":"fa4118d48714516817d5552a212c5994f372c44e3de166a04ff50792c5d28435"} Nov 24 01:28:43 crc kubenswrapper[4755]: I1124 01:28:43.406831 4755 scope.go:117] "RemoveContainer" containerID="3d520c3cad01ee8f4e153fbea2e9442ad72b0787ffda3bf12bad71de940b3c47" Nov 24 01:28:43 crc kubenswrapper[4755]: I1124 01:28:43.406498 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 24 01:28:43 crc kubenswrapper[4755]: I1124 01:28:43.439895 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 24 01:28:43 crc kubenswrapper[4755]: I1124 01:28:43.445274 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 24 01:28:43 crc kubenswrapper[4755]: I1124 01:28:43.461629 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Nov 24 01:28:43 crc kubenswrapper[4755]: E1124 01:28:43.462058 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a367868-ca1d-48df-93a9-c8104ca535d9" containerName="glance-httpd" Nov 24 01:28:43 crc kubenswrapper[4755]: I1124 01:28:43.462079 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a367868-ca1d-48df-93a9-c8104ca535d9" containerName="glance-httpd" Nov 24 01:28:43 crc kubenswrapper[4755]: E1124 01:28:43.462094 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a367868-ca1d-48df-93a9-c8104ca535d9" containerName="glance-log" Nov 24 01:28:43 crc kubenswrapper[4755]: I1124 01:28:43.462103 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a367868-ca1d-48df-93a9-c8104ca535d9" containerName="glance-log" Nov 24 01:28:43 crc kubenswrapper[4755]: I1124 01:28:43.462302 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a367868-ca1d-48df-93a9-c8104ca535d9" containerName="glance-httpd" Nov 24 01:28:43 crc kubenswrapper[4755]: I1124 01:28:43.462325 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a367868-ca1d-48df-93a9-c8104ca535d9" containerName="glance-log" Nov 24 01:28:43 crc kubenswrapper[4755]: I1124 01:28:43.465142 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 24 01:28:43 crc kubenswrapper[4755]: I1124 01:28:43.467269 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Nov 24 01:28:43 crc kubenswrapper[4755]: I1124 01:28:43.469220 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Nov 24 01:28:43 crc kubenswrapper[4755]: I1124 01:28:43.484354 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 24 01:28:43 crc kubenswrapper[4755]: I1124 01:28:43.495791 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"0d5b1659-28dc-49a7-9b79-e04b9e30f0f6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:43 crc kubenswrapper[4755]: I1124 01:28:43.495890 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d5b1659-28dc-49a7-9b79-e04b9e30f0f6-logs\") pod \"glance-default-external-api-0\" (UID: \"0d5b1659-28dc-49a7-9b79-e04b9e30f0f6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:43 crc kubenswrapper[4755]: I1124 01:28:43.495936 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0d5b1659-28dc-49a7-9b79-e04b9e30f0f6-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"0d5b1659-28dc-49a7-9b79-e04b9e30f0f6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:43 crc kubenswrapper[4755]: I1124 01:28:43.495973 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jjzjl\" (UniqueName: \"kubernetes.io/projected/0d5b1659-28dc-49a7-9b79-e04b9e30f0f6-kube-api-access-jjzjl\") pod \"glance-default-external-api-0\" (UID: \"0d5b1659-28dc-49a7-9b79-e04b9e30f0f6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:43 crc kubenswrapper[4755]: I1124 01:28:43.496066 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0d5b1659-28dc-49a7-9b79-e04b9e30f0f6-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"0d5b1659-28dc-49a7-9b79-e04b9e30f0f6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:43 crc kubenswrapper[4755]: I1124 01:28:43.496100 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d5b1659-28dc-49a7-9b79-e04b9e30f0f6-config-data\") pod \"glance-default-external-api-0\" (UID: \"0d5b1659-28dc-49a7-9b79-e04b9e30f0f6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:43 crc kubenswrapper[4755]: I1124 01:28:43.496138 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d5b1659-28dc-49a7-9b79-e04b9e30f0f6-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"0d5b1659-28dc-49a7-9b79-e04b9e30f0f6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:43 crc kubenswrapper[4755]: I1124 01:28:43.496165 4755 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0d5b1659-28dc-49a7-9b79-e04b9e30f0f6-scripts\") pod \"glance-default-external-api-0\" (UID: \"0d5b1659-28dc-49a7-9b79-e04b9e30f0f6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:43 crc kubenswrapper[4755]: I1124 01:28:43.598730 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0d5b1659-28dc-49a7-9b79-e04b9e30f0f6-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"0d5b1659-28dc-49a7-9b79-e04b9e30f0f6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:43 crc kubenswrapper[4755]: I1124 01:28:43.598782 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jjzjl\" (UniqueName: \"kubernetes.io/projected/0d5b1659-28dc-49a7-9b79-e04b9e30f0f6-kube-api-access-jjzjl\") pod \"glance-default-external-api-0\" (UID: \"0d5b1659-28dc-49a7-9b79-e04b9e30f0f6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:43 crc kubenswrapper[4755]: I1124 01:28:43.598856 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0d5b1659-28dc-49a7-9b79-e04b9e30f0f6-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"0d5b1659-28dc-49a7-9b79-e04b9e30f0f6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:43 crc kubenswrapper[4755]: I1124 01:28:43.598877 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d5b1659-28dc-49a7-9b79-e04b9e30f0f6-config-data\") pod \"glance-default-external-api-0\" (UID: \"0d5b1659-28dc-49a7-9b79-e04b9e30f0f6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:43 crc kubenswrapper[4755]: I1124 01:28:43.598900 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d5b1659-28dc-49a7-9b79-e04b9e30f0f6-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"0d5b1659-28dc-49a7-9b79-e04b9e30f0f6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:43 crc kubenswrapper[4755]: I1124 01:28:43.598917 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0d5b1659-28dc-49a7-9b79-e04b9e30f0f6-scripts\") pod \"glance-default-external-api-0\" (UID: \"0d5b1659-28dc-49a7-9b79-e04b9e30f0f6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:43 crc kubenswrapper[4755]: I1124 01:28:43.598937 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"0d5b1659-28dc-49a7-9b79-e04b9e30f0f6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:43 crc kubenswrapper[4755]: I1124 01:28:43.598981 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d5b1659-28dc-49a7-9b79-e04b9e30f0f6-logs\") pod \"glance-default-external-api-0\" (UID: \"0d5b1659-28dc-49a7-9b79-e04b9e30f0f6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:43 crc kubenswrapper[4755]: I1124 01:28:43.599368 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/0d5b1659-28dc-49a7-9b79-e04b9e30f0f6-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"0d5b1659-28dc-49a7-9b79-e04b9e30f0f6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:43 crc kubenswrapper[4755]: I1124 01:28:43.599385 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d5b1659-28dc-49a7-9b79-e04b9e30f0f6-logs\") pod \"glance-default-external-api-0\" (UID: \"0d5b1659-28dc-49a7-9b79-e04b9e30f0f6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:43 crc kubenswrapper[4755]: I1124 01:28:43.599733 4755 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"0d5b1659-28dc-49a7-9b79-e04b9e30f0f6\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/glance-default-external-api-0" Nov 24 01:28:43 crc kubenswrapper[4755]: I1124 01:28:43.608631 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0d5b1659-28dc-49a7-9b79-e04b9e30f0f6-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"0d5b1659-28dc-49a7-9b79-e04b9e30f0f6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:43 crc kubenswrapper[4755]: I1124 01:28:43.608855 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0d5b1659-28dc-49a7-9b79-e04b9e30f0f6-scripts\") pod \"glance-default-external-api-0\" (UID: \"0d5b1659-28dc-49a7-9b79-e04b9e30f0f6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:43 crc kubenswrapper[4755]: I1124 01:28:43.609563 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d5b1659-28dc-49a7-9b79-e04b9e30f0f6-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"0d5b1659-28dc-49a7-9b79-e04b9e30f0f6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:43 crc kubenswrapper[4755]: I1124 01:28:43.620928 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d5b1659-28dc-49a7-9b79-e04b9e30f0f6-config-data\") pod \"glance-default-external-api-0\" (UID: \"0d5b1659-28dc-49a7-9b79-e04b9e30f0f6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:43 crc kubenswrapper[4755]: I1124 01:28:43.636898 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jjzjl\" (UniqueName: \"kubernetes.io/projected/0d5b1659-28dc-49a7-9b79-e04b9e30f0f6-kube-api-access-jjzjl\") pod \"glance-default-external-api-0\" (UID: \"0d5b1659-28dc-49a7-9b79-e04b9e30f0f6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:43 crc kubenswrapper[4755]: I1124 01:28:43.676816 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"0d5b1659-28dc-49a7-9b79-e04b9e30f0f6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:28:43 crc kubenswrapper[4755]: I1124 01:28:43.788078 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 24 01:28:44 crc kubenswrapper[4755]: I1124 01:28:44.008931 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5a367868-ca1d-48df-93a9-c8104ca535d9" path="/var/lib/kubelet/pods/5a367868-ca1d-48df-93a9-c8104ca535d9/volumes" Nov 24 01:28:44 crc kubenswrapper[4755]: E1124 01:28:44.511100 4755 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-placement-api:current-podified" Nov 24 01:28:44 crc kubenswrapper[4755]: E1124 01:28:44.511347 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:placement-db-sync,Image:quay.io/podified-antelope-centos9/openstack-placement-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/placement,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:placement-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-gpxxc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42482,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-db-sync-tgz4r_openstack(ec11ae96-46e1-47a2-ae19-61941253ce7c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 24 01:28:44 crc kubenswrapper[4755]: E1124 01:28:44.513419 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/placement-db-sync-tgz4r" podUID="ec11ae96-46e1-47a2-ae19-61941253ce7c" Nov 24 01:28:45 crc kubenswrapper[4755]: I1124 01:28:45.115277 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-77585f5f8c-x9ksn" 
podUID="46a31b0c-c17e-469f-823e-f56504308b2c" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.125:5353: connect: connection refused" Nov 24 01:28:45 crc kubenswrapper[4755]: E1124 01:28:45.423337 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-placement-api:current-podified\\\"\"" pod="openstack/placement-db-sync-tgz4r" podUID="ec11ae96-46e1-47a2-ae19-61941253ce7c" Nov 24 01:28:48 crc kubenswrapper[4755]: E1124 01:28:48.842224 4755 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Nov 24 01:28:48 crc kubenswrapper[4755]: E1124 01:28:48.842784 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n87h647h96h5b7h54bh67h676h599h4hc5h678hb7h55fh74h58fh647h696hc4h5bfh68fh5b8hdchbh699h685hcfh56fhbfh574h559h675h644q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-czm7j,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-6d767c7b5c-n5rr5_openstack(9005ec18-506f-4b7c-a06c-7d4d619e3732): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 24 01:28:48 crc kubenswrapper[4755]: E1124 01:28:48.845157 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-6d767c7b5c-n5rr5" 
podUID="9005ec18-506f-4b7c-a06c-7d4d619e3732" Nov 24 01:28:50 crc kubenswrapper[4755]: I1124 01:28:50.114639 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-77585f5f8c-x9ksn" podUID="46a31b0c-c17e-469f-823e-f56504308b2c" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.125:5353: connect: connection refused" Nov 24 01:28:50 crc kubenswrapper[4755]: I1124 01:28:50.114948 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-77585f5f8c-x9ksn" Nov 24 01:28:50 crc kubenswrapper[4755]: I1124 01:28:50.463216 4755 generic.go:334] "Generic (PLEG): container finished" podID="f90a48fa-6911-4df9-a1e8-d64ba7547daf" containerID="448928381bf34cd7e310c0d907ab37d7068898962c209f6742fa685befbcf1f8" exitCode=0 Nov 24 01:28:50 crc kubenswrapper[4755]: I1124 01:28:50.463281 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-65g8w" event={"ID":"f90a48fa-6911-4df9-a1e8-d64ba7547daf","Type":"ContainerDied","Data":"448928381bf34cd7e310c0d907ab37d7068898962c209f6742fa685befbcf1f8"} Nov 24 01:28:55 crc kubenswrapper[4755]: I1124 01:28:55.114471 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-77585f5f8c-x9ksn" podUID="46a31b0c-c17e-469f-823e-f56504308b2c" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.125:5353: connect: connection refused" Nov 24 01:28:55 crc kubenswrapper[4755]: I1124 01:28:55.846807 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 24 01:28:55 crc kubenswrapper[4755]: I1124 01:28:55.960195 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7e9ef39d-434b-43f6-9a27-2635b2e93775-logs\") pod \"7e9ef39d-434b-43f6-9a27-2635b2e93775\" (UID: \"7e9ef39d-434b-43f6-9a27-2635b2e93775\") " Nov 24 01:28:55 crc kubenswrapper[4755]: I1124 01:28:55.960267 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e9ef39d-434b-43f6-9a27-2635b2e93775-scripts\") pod \"7e9ef39d-434b-43f6-9a27-2635b2e93775\" (UID: \"7e9ef39d-434b-43f6-9a27-2635b2e93775\") " Nov 24 01:28:55 crc kubenswrapper[4755]: I1124 01:28:55.960316 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7e9ef39d-434b-43f6-9a27-2635b2e93775-httpd-run\") pod \"7e9ef39d-434b-43f6-9a27-2635b2e93775\" (UID: \"7e9ef39d-434b-43f6-9a27-2635b2e93775\") " Nov 24 01:28:55 crc kubenswrapper[4755]: I1124 01:28:55.960389 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"7e9ef39d-434b-43f6-9a27-2635b2e93775\" (UID: \"7e9ef39d-434b-43f6-9a27-2635b2e93775\") " Nov 24 01:28:55 crc kubenswrapper[4755]: I1124 01:28:55.960426 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fpzc6\" (UniqueName: \"kubernetes.io/projected/7e9ef39d-434b-43f6-9a27-2635b2e93775-kube-api-access-fpzc6\") pod \"7e9ef39d-434b-43f6-9a27-2635b2e93775\" (UID: \"7e9ef39d-434b-43f6-9a27-2635b2e93775\") " Nov 24 01:28:55 crc kubenswrapper[4755]: I1124 01:28:55.960465 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/7e9ef39d-434b-43f6-9a27-2635b2e93775-config-data\") pod \"7e9ef39d-434b-43f6-9a27-2635b2e93775\" (UID: \"7e9ef39d-434b-43f6-9a27-2635b2e93775\") " Nov 24 01:28:55 crc kubenswrapper[4755]: I1124 01:28:55.960488 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e9ef39d-434b-43f6-9a27-2635b2e93775-internal-tls-certs\") pod \"7e9ef39d-434b-43f6-9a27-2635b2e93775\" (UID: \"7e9ef39d-434b-43f6-9a27-2635b2e93775\") " Nov 24 01:28:55 crc kubenswrapper[4755]: I1124 01:28:55.960551 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e9ef39d-434b-43f6-9a27-2635b2e93775-combined-ca-bundle\") pod \"7e9ef39d-434b-43f6-9a27-2635b2e93775\" (UID: \"7e9ef39d-434b-43f6-9a27-2635b2e93775\") " Nov 24 01:28:55 crc kubenswrapper[4755]: I1124 01:28:55.961094 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7e9ef39d-434b-43f6-9a27-2635b2e93775-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "7e9ef39d-434b-43f6-9a27-2635b2e93775" (UID: "7e9ef39d-434b-43f6-9a27-2635b2e93775"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:28:55 crc kubenswrapper[4755]: I1124 01:28:55.961307 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7e9ef39d-434b-43f6-9a27-2635b2e93775-logs" (OuterVolumeSpecName: "logs") pod "7e9ef39d-434b-43f6-9a27-2635b2e93775" (UID: "7e9ef39d-434b-43f6-9a27-2635b2e93775"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:28:55 crc kubenswrapper[4755]: I1124 01:28:55.965948 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e9ef39d-434b-43f6-9a27-2635b2e93775-scripts" (OuterVolumeSpecName: "scripts") pod "7e9ef39d-434b-43f6-9a27-2635b2e93775" (UID: "7e9ef39d-434b-43f6-9a27-2635b2e93775"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:28:55 crc kubenswrapper[4755]: I1124 01:28:55.966340 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "7e9ef39d-434b-43f6-9a27-2635b2e93775" (UID: "7e9ef39d-434b-43f6-9a27-2635b2e93775"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 24 01:28:55 crc kubenswrapper[4755]: I1124 01:28:55.966576 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e9ef39d-434b-43f6-9a27-2635b2e93775-kube-api-access-fpzc6" (OuterVolumeSpecName: "kube-api-access-fpzc6") pod "7e9ef39d-434b-43f6-9a27-2635b2e93775" (UID: "7e9ef39d-434b-43f6-9a27-2635b2e93775"). InnerVolumeSpecName "kube-api-access-fpzc6". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:28:55 crc kubenswrapper[4755]: I1124 01:28:55.994965 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e9ef39d-434b-43f6-9a27-2635b2e93775-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7e9ef39d-434b-43f6-9a27-2635b2e93775" (UID: "7e9ef39d-434b-43f6-9a27-2635b2e93775"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.010578 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e9ef39d-434b-43f6-9a27-2635b2e93775-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "7e9ef39d-434b-43f6-9a27-2635b2e93775" (UID: "7e9ef39d-434b-43f6-9a27-2635b2e93775"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.015779 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e9ef39d-434b-43f6-9a27-2635b2e93775-config-data" (OuterVolumeSpecName: "config-data") pod "7e9ef39d-434b-43f6-9a27-2635b2e93775" (UID: "7e9ef39d-434b-43f6-9a27-2635b2e93775"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.062500 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e9ef39d-434b-43f6-9a27-2635b2e93775-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.062531 4755 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7e9ef39d-434b-43f6-9a27-2635b2e93775-logs\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.062540 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e9ef39d-434b-43f6-9a27-2635b2e93775-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.062547 4755 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7e9ef39d-434b-43f6-9a27-2635b2e93775-httpd-run\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.062572 4755 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.062582 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fpzc6\" (UniqueName: \"kubernetes.io/projected/7e9ef39d-434b-43f6-9a27-2635b2e93775-kube-api-access-fpzc6\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.062592 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e9ef39d-434b-43f6-9a27-2635b2e93775-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.062614 4755 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e9ef39d-434b-43f6-9a27-2635b2e93775-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.094555 4755 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.164418 4755 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 
01:28:56.521096 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"7e9ef39d-434b-43f6-9a27-2635b2e93775","Type":"ContainerDied","Data":"62ca5d51cdfa25a4f6aaf1d804ce7c873bc15f261833023f36423b1a6871e435"} Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.521223 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.582118 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.591898 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.609781 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 24 01:28:56 crc kubenswrapper[4755]: E1124 01:28:56.610170 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e9ef39d-434b-43f6-9a27-2635b2e93775" containerName="glance-httpd" Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.610187 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e9ef39d-434b-43f6-9a27-2635b2e93775" containerName="glance-httpd" Nov 24 01:28:56 crc kubenswrapper[4755]: E1124 01:28:56.610218 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e9ef39d-434b-43f6-9a27-2635b2e93775" containerName="glance-log" Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.610225 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e9ef39d-434b-43f6-9a27-2635b2e93775" containerName="glance-log" Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.610388 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e9ef39d-434b-43f6-9a27-2635b2e93775" containerName="glance-httpd" Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.610410 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e9ef39d-434b-43f6-9a27-2635b2e93775" containerName="glance-log" Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.611309 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.618021 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.652324 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.652541 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.684341 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"a10198c5-a145-4df1-a99d-14463ff5d048\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.684423 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a10198c5-a145-4df1-a99d-14463ff5d048-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"a10198c5-a145-4df1-a99d-14463ff5d048\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.684451 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a10198c5-a145-4df1-a99d-14463ff5d048-logs\") pod \"glance-default-internal-api-0\" (UID: \"a10198c5-a145-4df1-a99d-14463ff5d048\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.684473 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a10198c5-a145-4df1-a99d-14463ff5d048-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"a10198c5-a145-4df1-a99d-14463ff5d048\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.684504 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k9q5t\" (UniqueName: \"kubernetes.io/projected/a10198c5-a145-4df1-a99d-14463ff5d048-kube-api-access-k9q5t\") pod \"glance-default-internal-api-0\" (UID: \"a10198c5-a145-4df1-a99d-14463ff5d048\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.684524 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a10198c5-a145-4df1-a99d-14463ff5d048-scripts\") pod \"glance-default-internal-api-0\" (UID: \"a10198c5-a145-4df1-a99d-14463ff5d048\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.685451 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a10198c5-a145-4df1-a99d-14463ff5d048-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"a10198c5-a145-4df1-a99d-14463ff5d048\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.685693 4755 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a10198c5-a145-4df1-a99d-14463ff5d048-config-data\") pod \"glance-default-internal-api-0\" (UID: \"a10198c5-a145-4df1-a99d-14463ff5d048\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.787331 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a10198c5-a145-4df1-a99d-14463ff5d048-config-data\") pod \"glance-default-internal-api-0\" (UID: \"a10198c5-a145-4df1-a99d-14463ff5d048\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.787684 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"a10198c5-a145-4df1-a99d-14463ff5d048\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.787784 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a10198c5-a145-4df1-a99d-14463ff5d048-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"a10198c5-a145-4df1-a99d-14463ff5d048\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.787812 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a10198c5-a145-4df1-a99d-14463ff5d048-logs\") pod \"glance-default-internal-api-0\" (UID: \"a10198c5-a145-4df1-a99d-14463ff5d048\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.787835 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a10198c5-a145-4df1-a99d-14463ff5d048-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"a10198c5-a145-4df1-a99d-14463ff5d048\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.787868 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k9q5t\" (UniqueName: \"kubernetes.io/projected/a10198c5-a145-4df1-a99d-14463ff5d048-kube-api-access-k9q5t\") pod \"glance-default-internal-api-0\" (UID: \"a10198c5-a145-4df1-a99d-14463ff5d048\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.787887 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a10198c5-a145-4df1-a99d-14463ff5d048-scripts\") pod \"glance-default-internal-api-0\" (UID: \"a10198c5-a145-4df1-a99d-14463ff5d048\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.787910 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a10198c5-a145-4df1-a99d-14463ff5d048-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"a10198c5-a145-4df1-a99d-14463ff5d048\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.788641 4755 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" 
(UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"a10198c5-a145-4df1-a99d-14463ff5d048\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/glance-default-internal-api-0" Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.789025 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a10198c5-a145-4df1-a99d-14463ff5d048-logs\") pod \"glance-default-internal-api-0\" (UID: \"a10198c5-a145-4df1-a99d-14463ff5d048\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.789055 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a10198c5-a145-4df1-a99d-14463ff5d048-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"a10198c5-a145-4df1-a99d-14463ff5d048\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.793379 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a10198c5-a145-4df1-a99d-14463ff5d048-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"a10198c5-a145-4df1-a99d-14463ff5d048\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.798157 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a10198c5-a145-4df1-a99d-14463ff5d048-config-data\") pod \"glance-default-internal-api-0\" (UID: \"a10198c5-a145-4df1-a99d-14463ff5d048\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.799805 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a10198c5-a145-4df1-a99d-14463ff5d048-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"a10198c5-a145-4df1-a99d-14463ff5d048\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.803924 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a10198c5-a145-4df1-a99d-14463ff5d048-scripts\") pod \"glance-default-internal-api-0\" (UID: \"a10198c5-a145-4df1-a99d-14463ff5d048\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.805655 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k9q5t\" (UniqueName: \"kubernetes.io/projected/a10198c5-a145-4df1-a99d-14463ff5d048-kube-api-access-k9q5t\") pod \"glance-default-internal-api-0\" (UID: \"a10198c5-a145-4df1-a99d-14463ff5d048\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.817724 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"a10198c5-a145-4df1-a99d-14463ff5d048\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:28:56 crc kubenswrapper[4755]: I1124 01:28:56.984409 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 24 01:28:57 crc kubenswrapper[4755]: E1124 01:28:57.833722 4755 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Nov 24 01:28:57 crc kubenswrapper[4755]: E1124 01:28:57.834003 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n596h676h57ch95h5d4h89h5c7h5bfh688h574h649h564h68dh5d9h55chd4h5c5hf6h5f5h685hbch64fh668h68bhd8h5f9hb7hf9h5fbh595h677h595q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-h976n,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-668d8db6fc-jzr8v_openstack(7b4dc449-172f-46cf-9e94-33827628c742): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 24 01:28:57 crc kubenswrapper[4755]: E1124 01:28:57.843197 4755 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Nov 24 01:28:57 crc kubenswrapper[4755]: E1124 01:28:57.843477 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n88h54h597h5ch5c4h96hf6h679h65fh58ch5c5h56h97h594h55bh55bh678h5d7h665h5d4h6dh5f9h585hc5h75h586h56ch5b9h56dhbbh7ch649q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5l2ss,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-5f5b8cffb5-6gsfn_openstack(3b688652-b82e-48df-8ffd-8d8234672564): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 24 01:28:57 crc kubenswrapper[4755]: E1124 01:28:57.858674 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-5f5b8cffb5-6gsfn" podUID="3b688652-b82e-48df-8ffd-8d8234672564" Nov 24 01:28:57 crc kubenswrapper[4755]: E1124 01:28:57.858678 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-668d8db6fc-jzr8v" podUID="7b4dc449-172f-46cf-9e94-33827628c742" Nov 24 01:28:57 crc kubenswrapper[4755]: I1124 01:28:57.858984 4755 scope.go:117] "RemoveContainer" containerID="00bebbf0d88f760673b1f2474ebf20e581245e288dda8d9b826bac87c1d5dc0b" Nov 24 01:28:57 crc kubenswrapper[4755]: I1124 01:28:57.936731 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-mfpvq" Nov 24 01:28:57 crc kubenswrapper[4755]: I1124 01:28:57.940716 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-65g8w" Nov 24 01:28:57 crc kubenswrapper[4755]: I1124 01:28:57.944099 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6d767c7b5c-n5rr5" Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.006134 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6354cb83-933d-4b3f-b922-ce9e4f94e123-combined-ca-bundle\") pod \"6354cb83-933d-4b3f-b922-ce9e4f94e123\" (UID: \"6354cb83-933d-4b3f-b922-ce9e4f94e123\") " Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.008059 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7m7tp\" (UniqueName: \"kubernetes.io/projected/f90a48fa-6911-4df9-a1e8-d64ba7547daf-kube-api-access-7m7tp\") pod \"f90a48fa-6911-4df9-a1e8-d64ba7547daf\" (UID: \"f90a48fa-6911-4df9-a1e8-d64ba7547daf\") " Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.008199 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f90a48fa-6911-4df9-a1e8-d64ba7547daf-config\") pod \"f90a48fa-6911-4df9-a1e8-d64ba7547daf\" (UID: \"f90a48fa-6911-4df9-a1e8-d64ba7547daf\") " Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.008236 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6354cb83-933d-4b3f-b922-ce9e4f94e123-fernet-keys\") pod \"6354cb83-933d-4b3f-b922-ce9e4f94e123\" (UID: \"6354cb83-933d-4b3f-b922-ce9e4f94e123\") " Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.008250 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f90a48fa-6911-4df9-a1e8-d64ba7547daf-combined-ca-bundle\") pod \"f90a48fa-6911-4df9-a1e8-d64ba7547daf\" (UID: \"f90a48fa-6911-4df9-a1e8-d64ba7547daf\") " Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.008290 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fnl9m\" (UniqueName: \"kubernetes.io/projected/6354cb83-933d-4b3f-b922-ce9e4f94e123-kube-api-access-fnl9m\") pod \"6354cb83-933d-4b3f-b922-ce9e4f94e123\" (UID: \"6354cb83-933d-4b3f-b922-ce9e4f94e123\") " Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.008339 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9005ec18-506f-4b7c-a06c-7d4d619e3732-horizon-secret-key\") pod \"9005ec18-506f-4b7c-a06c-7d4d619e3732\" (UID: \"9005ec18-506f-4b7c-a06c-7d4d619e3732\") " Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.008371 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6354cb83-933d-4b3f-b922-ce9e4f94e123-credential-keys\") pod \"6354cb83-933d-4b3f-b922-ce9e4f94e123\" (UID: \"6354cb83-933d-4b3f-b922-ce9e4f94e123\") " Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.008396 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9005ec18-506f-4b7c-a06c-7d4d619e3732-scripts\") pod \"9005ec18-506f-4b7c-a06c-7d4d619e3732\" (UID: \"9005ec18-506f-4b7c-a06c-7d4d619e3732\") " Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.008429 4755 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9005ec18-506f-4b7c-a06c-7d4d619e3732-logs\") pod \"9005ec18-506f-4b7c-a06c-7d4d619e3732\" (UID: \"9005ec18-506f-4b7c-a06c-7d4d619e3732\") " Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.008456 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6354cb83-933d-4b3f-b922-ce9e4f94e123-config-data\") pod \"6354cb83-933d-4b3f-b922-ce9e4f94e123\" (UID: \"6354cb83-933d-4b3f-b922-ce9e4f94e123\") " Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.008484 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6354cb83-933d-4b3f-b922-ce9e4f94e123-scripts\") pod \"6354cb83-933d-4b3f-b922-ce9e4f94e123\" (UID: \"6354cb83-933d-4b3f-b922-ce9e4f94e123\") " Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.008529 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-czm7j\" (UniqueName: \"kubernetes.io/projected/9005ec18-506f-4b7c-a06c-7d4d619e3732-kube-api-access-czm7j\") pod \"9005ec18-506f-4b7c-a06c-7d4d619e3732\" (UID: \"9005ec18-506f-4b7c-a06c-7d4d619e3732\") " Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.008577 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9005ec18-506f-4b7c-a06c-7d4d619e3732-config-data\") pod \"9005ec18-506f-4b7c-a06c-7d4d619e3732\" (UID: \"9005ec18-506f-4b7c-a06c-7d4d619e3732\") " Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.011070 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9005ec18-506f-4b7c-a06c-7d4d619e3732-logs" (OuterVolumeSpecName: "logs") pod "9005ec18-506f-4b7c-a06c-7d4d619e3732" (UID: "9005ec18-506f-4b7c-a06c-7d4d619e3732"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.011739 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9005ec18-506f-4b7c-a06c-7d4d619e3732-scripts" (OuterVolumeSpecName: "scripts") pod "9005ec18-506f-4b7c-a06c-7d4d619e3732" (UID: "9005ec18-506f-4b7c-a06c-7d4d619e3732"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.011965 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9005ec18-506f-4b7c-a06c-7d4d619e3732-config-data" (OuterVolumeSpecName: "config-data") pod "9005ec18-506f-4b7c-a06c-7d4d619e3732" (UID: "9005ec18-506f-4b7c-a06c-7d4d619e3732"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.014855 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9005ec18-506f-4b7c-a06c-7d4d619e3732-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "9005ec18-506f-4b7c-a06c-7d4d619e3732" (UID: "9005ec18-506f-4b7c-a06c-7d4d619e3732"). InnerVolumeSpecName "horizon-secret-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.015337 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6354cb83-933d-4b3f-b922-ce9e4f94e123-scripts" (OuterVolumeSpecName: "scripts") pod "6354cb83-933d-4b3f-b922-ce9e4f94e123" (UID: "6354cb83-933d-4b3f-b922-ce9e4f94e123"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.020180 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6354cb83-933d-4b3f-b922-ce9e4f94e123-kube-api-access-fnl9m" (OuterVolumeSpecName: "kube-api-access-fnl9m") pod "6354cb83-933d-4b3f-b922-ce9e4f94e123" (UID: "6354cb83-933d-4b3f-b922-ce9e4f94e123"). InnerVolumeSpecName "kube-api-access-fnl9m". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.020800 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f90a48fa-6911-4df9-a1e8-d64ba7547daf-kube-api-access-7m7tp" (OuterVolumeSpecName: "kube-api-access-7m7tp") pod "f90a48fa-6911-4df9-a1e8-d64ba7547daf" (UID: "f90a48fa-6911-4df9-a1e8-d64ba7547daf"). InnerVolumeSpecName "kube-api-access-7m7tp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.021782 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e9ef39d-434b-43f6-9a27-2635b2e93775" path="/var/lib/kubelet/pods/7e9ef39d-434b-43f6-9a27-2635b2e93775/volumes" Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.023729 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9005ec18-506f-4b7c-a06c-7d4d619e3732-kube-api-access-czm7j" (OuterVolumeSpecName: "kube-api-access-czm7j") pod "9005ec18-506f-4b7c-a06c-7d4d619e3732" (UID: "9005ec18-506f-4b7c-a06c-7d4d619e3732"). InnerVolumeSpecName "kube-api-access-czm7j". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.024281 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6354cb83-933d-4b3f-b922-ce9e4f94e123-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "6354cb83-933d-4b3f-b922-ce9e4f94e123" (UID: "6354cb83-933d-4b3f-b922-ce9e4f94e123"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.032410 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6354cb83-933d-4b3f-b922-ce9e4f94e123-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "6354cb83-933d-4b3f-b922-ce9e4f94e123" (UID: "6354cb83-933d-4b3f-b922-ce9e4f94e123"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.043010 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6354cb83-933d-4b3f-b922-ce9e4f94e123-config-data" (OuterVolumeSpecName: "config-data") pod "6354cb83-933d-4b3f-b922-ce9e4f94e123" (UID: "6354cb83-933d-4b3f-b922-ce9e4f94e123"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.083481 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f90a48fa-6911-4df9-a1e8-d64ba7547daf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f90a48fa-6911-4df9-a1e8-d64ba7547daf" (UID: "f90a48fa-6911-4df9-a1e8-d64ba7547daf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.083513 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f90a48fa-6911-4df9-a1e8-d64ba7547daf-config" (OuterVolumeSpecName: "config") pod "f90a48fa-6911-4df9-a1e8-d64ba7547daf" (UID: "f90a48fa-6911-4df9-a1e8-d64ba7547daf"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.110514 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6354cb83-933d-4b3f-b922-ce9e4f94e123-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6354cb83-933d-4b3f-b922-ce9e4f94e123" (UID: "6354cb83-933d-4b3f-b922-ce9e4f94e123"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.110960 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-czm7j\" (UniqueName: \"kubernetes.io/projected/9005ec18-506f-4b7c-a06c-7d4d619e3732-kube-api-access-czm7j\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.110985 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9005ec18-506f-4b7c-a06c-7d4d619e3732-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.110999 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6354cb83-933d-4b3f-b922-ce9e4f94e123-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.111011 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7m7tp\" (UniqueName: \"kubernetes.io/projected/f90a48fa-6911-4df9-a1e8-d64ba7547daf-kube-api-access-7m7tp\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.111023 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/f90a48fa-6911-4df9-a1e8-d64ba7547daf-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.111033 4755 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6354cb83-933d-4b3f-b922-ce9e4f94e123-fernet-keys\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.111044 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f90a48fa-6911-4df9-a1e8-d64ba7547daf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.111055 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fnl9m\" (UniqueName: \"kubernetes.io/projected/6354cb83-933d-4b3f-b922-ce9e4f94e123-kube-api-access-fnl9m\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:58 crc 
kubenswrapper[4755]: I1124 01:28:58.111066 4755 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9005ec18-506f-4b7c-a06c-7d4d619e3732-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.111076 4755 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6354cb83-933d-4b3f-b922-ce9e4f94e123-credential-keys\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.111087 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9005ec18-506f-4b7c-a06c-7d4d619e3732-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.111098 4755 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9005ec18-506f-4b7c-a06c-7d4d619e3732-logs\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.111108 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6354cb83-933d-4b3f-b922-ce9e4f94e123-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.111119 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6354cb83-933d-4b3f-b922-ce9e4f94e123-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.383733 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-75d8fb7cd4-vbxkn"] Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.542243 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-mfpvq" Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.542242 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-mfpvq" event={"ID":"6354cb83-933d-4b3f-b922-ce9e4f94e123","Type":"ContainerDied","Data":"d48f1ee6f53a8336fc8a8fcf6877df8e2cfd39f1d7df1fcdc746dd99582cb18c"} Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.543277 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d48f1ee6f53a8336fc8a8fcf6877df8e2cfd39f1d7df1fcdc746dd99582cb18c" Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.551204 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6d767c7b5c-n5rr5" event={"ID":"9005ec18-506f-4b7c-a06c-7d4d619e3732","Type":"ContainerDied","Data":"eaf344b5644eca6502bd1ff588586003d4442b15deabc057b5757cd134099bd8"} Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.551249 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6d767c7b5c-n5rr5" Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.556344 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-65g8w" Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.556775 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-65g8w" event={"ID":"f90a48fa-6911-4df9-a1e8-d64ba7547daf","Type":"ContainerDied","Data":"f8f6b49cff050f3fa7b6f37765122190407c2ab4f087e3b00068259c6dede6bc"} Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.556832 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f8f6b49cff050f3fa7b6f37765122190407c2ab4f087e3b00068259c6dede6bc" Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.693317 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6d767c7b5c-n5rr5"] Nov 24 01:28:58 crc kubenswrapper[4755]: I1124 01:28:58.698762 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-6d767c7b5c-n5rr5"] Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.024624 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-mfpvq"] Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.032672 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-mfpvq"] Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.124387 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-lb2nq"] Nov 24 01:28:59 crc kubenswrapper[4755]: E1124 01:28:59.124953 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6354cb83-933d-4b3f-b922-ce9e4f94e123" containerName="keystone-bootstrap" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.125058 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="6354cb83-933d-4b3f-b922-ce9e4f94e123" containerName="keystone-bootstrap" Nov 24 01:28:59 crc kubenswrapper[4755]: E1124 01:28:59.125170 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f90a48fa-6911-4df9-a1e8-d64ba7547daf" containerName="neutron-db-sync" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.125262 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="f90a48fa-6911-4df9-a1e8-d64ba7547daf" containerName="neutron-db-sync" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.125512 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="6354cb83-933d-4b3f-b922-ce9e4f94e123" containerName="keystone-bootstrap" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.125624 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="f90a48fa-6911-4df9-a1e8-d64ba7547daf" containerName="neutron-db-sync" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.126401 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-lb2nq" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.128778 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.130308 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-llx85" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.130590 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.130700 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.144890 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.148304 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-lb2nq"] Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.237639 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebd8073d-4cda-4515-8999-cfdb8a4fcc7f-combined-ca-bundle\") pod \"keystone-bootstrap-lb2nq\" (UID: \"ebd8073d-4cda-4515-8999-cfdb8a4fcc7f\") " pod="openstack/keystone-bootstrap-lb2nq" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.237735 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ebd8073d-4cda-4515-8999-cfdb8a4fcc7f-config-data\") pod \"keystone-bootstrap-lb2nq\" (UID: \"ebd8073d-4cda-4515-8999-cfdb8a4fcc7f\") " pod="openstack/keystone-bootstrap-lb2nq" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.237769 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k8qkn\" (UniqueName: \"kubernetes.io/projected/ebd8073d-4cda-4515-8999-cfdb8a4fcc7f-kube-api-access-k8qkn\") pod \"keystone-bootstrap-lb2nq\" (UID: \"ebd8073d-4cda-4515-8999-cfdb8a4fcc7f\") " pod="openstack/keystone-bootstrap-lb2nq" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.237821 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ebd8073d-4cda-4515-8999-cfdb8a4fcc7f-credential-keys\") pod \"keystone-bootstrap-lb2nq\" (UID: \"ebd8073d-4cda-4515-8999-cfdb8a4fcc7f\") " pod="openstack/keystone-bootstrap-lb2nq" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.237869 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ebd8073d-4cda-4515-8999-cfdb8a4fcc7f-fernet-keys\") pod \"keystone-bootstrap-lb2nq\" (UID: \"ebd8073d-4cda-4515-8999-cfdb8a4fcc7f\") " pod="openstack/keystone-bootstrap-lb2nq" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.237905 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ebd8073d-4cda-4515-8999-cfdb8a4fcc7f-scripts\") pod \"keystone-bootstrap-lb2nq\" (UID: \"ebd8073d-4cda-4515-8999-cfdb8a4fcc7f\") " pod="openstack/keystone-bootstrap-lb2nq" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.244224 4755 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/dnsmasq-dns-84b966f6c9-22wt6"] Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.254723 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84b966f6c9-22wt6" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.270344 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-22wt6"] Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.342512 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebd8073d-4cda-4515-8999-cfdb8a4fcc7f-combined-ca-bundle\") pod \"keystone-bootstrap-lb2nq\" (UID: \"ebd8073d-4cda-4515-8999-cfdb8a4fcc7f\") " pod="openstack/keystone-bootstrap-lb2nq" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.342578 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/93836cb5-1416-48b2-bd66-984b5a90ee2b-dns-svc\") pod \"dnsmasq-dns-84b966f6c9-22wt6\" (UID: \"93836cb5-1416-48b2-bd66-984b5a90ee2b\") " pod="openstack/dnsmasq-dns-84b966f6c9-22wt6" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.342629 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ebd8073d-4cda-4515-8999-cfdb8a4fcc7f-config-data\") pod \"keystone-bootstrap-lb2nq\" (UID: \"ebd8073d-4cda-4515-8999-cfdb8a4fcc7f\") " pod="openstack/keystone-bootstrap-lb2nq" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.342652 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k8qkn\" (UniqueName: \"kubernetes.io/projected/ebd8073d-4cda-4515-8999-cfdb8a4fcc7f-kube-api-access-k8qkn\") pod \"keystone-bootstrap-lb2nq\" (UID: \"ebd8073d-4cda-4515-8999-cfdb8a4fcc7f\") " pod="openstack/keystone-bootstrap-lb2nq" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.342672 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/93836cb5-1416-48b2-bd66-984b5a90ee2b-dns-swift-storage-0\") pod \"dnsmasq-dns-84b966f6c9-22wt6\" (UID: \"93836cb5-1416-48b2-bd66-984b5a90ee2b\") " pod="openstack/dnsmasq-dns-84b966f6c9-22wt6" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.342697 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93836cb5-1416-48b2-bd66-984b5a90ee2b-config\") pod \"dnsmasq-dns-84b966f6c9-22wt6\" (UID: \"93836cb5-1416-48b2-bd66-984b5a90ee2b\") " pod="openstack/dnsmasq-dns-84b966f6c9-22wt6" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.342725 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ebd8073d-4cda-4515-8999-cfdb8a4fcc7f-credential-keys\") pod \"keystone-bootstrap-lb2nq\" (UID: \"ebd8073d-4cda-4515-8999-cfdb8a4fcc7f\") " pod="openstack/keystone-bootstrap-lb2nq" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.342741 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/93836cb5-1416-48b2-bd66-984b5a90ee2b-ovsdbserver-sb\") pod \"dnsmasq-dns-84b966f6c9-22wt6\" (UID: \"93836cb5-1416-48b2-bd66-984b5a90ee2b\") " pod="openstack/dnsmasq-dns-84b966f6c9-22wt6" Nov 24 
01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.342775 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kgv62\" (UniqueName: \"kubernetes.io/projected/93836cb5-1416-48b2-bd66-984b5a90ee2b-kube-api-access-kgv62\") pod \"dnsmasq-dns-84b966f6c9-22wt6\" (UID: \"93836cb5-1416-48b2-bd66-984b5a90ee2b\") " pod="openstack/dnsmasq-dns-84b966f6c9-22wt6" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.342792 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ebd8073d-4cda-4515-8999-cfdb8a4fcc7f-fernet-keys\") pod \"keystone-bootstrap-lb2nq\" (UID: \"ebd8073d-4cda-4515-8999-cfdb8a4fcc7f\") " pod="openstack/keystone-bootstrap-lb2nq" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.342817 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ebd8073d-4cda-4515-8999-cfdb8a4fcc7f-scripts\") pod \"keystone-bootstrap-lb2nq\" (UID: \"ebd8073d-4cda-4515-8999-cfdb8a4fcc7f\") " pod="openstack/keystone-bootstrap-lb2nq" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.342840 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/93836cb5-1416-48b2-bd66-984b5a90ee2b-ovsdbserver-nb\") pod \"dnsmasq-dns-84b966f6c9-22wt6\" (UID: \"93836cb5-1416-48b2-bd66-984b5a90ee2b\") " pod="openstack/dnsmasq-dns-84b966f6c9-22wt6" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.358188 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ebd8073d-4cda-4515-8999-cfdb8a4fcc7f-credential-keys\") pod \"keystone-bootstrap-lb2nq\" (UID: \"ebd8073d-4cda-4515-8999-cfdb8a4fcc7f\") " pod="openstack/keystone-bootstrap-lb2nq" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.358312 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebd8073d-4cda-4515-8999-cfdb8a4fcc7f-combined-ca-bundle\") pod \"keystone-bootstrap-lb2nq\" (UID: \"ebd8073d-4cda-4515-8999-cfdb8a4fcc7f\") " pod="openstack/keystone-bootstrap-lb2nq" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.358511 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ebd8073d-4cda-4515-8999-cfdb8a4fcc7f-fernet-keys\") pod \"keystone-bootstrap-lb2nq\" (UID: \"ebd8073d-4cda-4515-8999-cfdb8a4fcc7f\") " pod="openstack/keystone-bootstrap-lb2nq" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.359476 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ebd8073d-4cda-4515-8999-cfdb8a4fcc7f-scripts\") pod \"keystone-bootstrap-lb2nq\" (UID: \"ebd8073d-4cda-4515-8999-cfdb8a4fcc7f\") " pod="openstack/keystone-bootstrap-lb2nq" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.378994 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ebd8073d-4cda-4515-8999-cfdb8a4fcc7f-config-data\") pod \"keystone-bootstrap-lb2nq\" (UID: \"ebd8073d-4cda-4515-8999-cfdb8a4fcc7f\") " pod="openstack/keystone-bootstrap-lb2nq" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.389089 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k8qkn\" 
(UniqueName: \"kubernetes.io/projected/ebd8073d-4cda-4515-8999-cfdb8a4fcc7f-kube-api-access-k8qkn\") pod \"keystone-bootstrap-lb2nq\" (UID: \"ebd8073d-4cda-4515-8999-cfdb8a4fcc7f\") " pod="openstack/keystone-bootstrap-lb2nq" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.444443 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/93836cb5-1416-48b2-bd66-984b5a90ee2b-dns-svc\") pod \"dnsmasq-dns-84b966f6c9-22wt6\" (UID: \"93836cb5-1416-48b2-bd66-984b5a90ee2b\") " pod="openstack/dnsmasq-dns-84b966f6c9-22wt6" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.444521 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/93836cb5-1416-48b2-bd66-984b5a90ee2b-dns-swift-storage-0\") pod \"dnsmasq-dns-84b966f6c9-22wt6\" (UID: \"93836cb5-1416-48b2-bd66-984b5a90ee2b\") " pod="openstack/dnsmasq-dns-84b966f6c9-22wt6" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.444549 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93836cb5-1416-48b2-bd66-984b5a90ee2b-config\") pod \"dnsmasq-dns-84b966f6c9-22wt6\" (UID: \"93836cb5-1416-48b2-bd66-984b5a90ee2b\") " pod="openstack/dnsmasq-dns-84b966f6c9-22wt6" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.444577 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/93836cb5-1416-48b2-bd66-984b5a90ee2b-ovsdbserver-sb\") pod \"dnsmasq-dns-84b966f6c9-22wt6\" (UID: \"93836cb5-1416-48b2-bd66-984b5a90ee2b\") " pod="openstack/dnsmasq-dns-84b966f6c9-22wt6" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.444624 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kgv62\" (UniqueName: \"kubernetes.io/projected/93836cb5-1416-48b2-bd66-984b5a90ee2b-kube-api-access-kgv62\") pod \"dnsmasq-dns-84b966f6c9-22wt6\" (UID: \"93836cb5-1416-48b2-bd66-984b5a90ee2b\") " pod="openstack/dnsmasq-dns-84b966f6c9-22wt6" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.444668 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/93836cb5-1416-48b2-bd66-984b5a90ee2b-ovsdbserver-nb\") pod \"dnsmasq-dns-84b966f6c9-22wt6\" (UID: \"93836cb5-1416-48b2-bd66-984b5a90ee2b\") " pod="openstack/dnsmasq-dns-84b966f6c9-22wt6" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.445700 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/93836cb5-1416-48b2-bd66-984b5a90ee2b-ovsdbserver-nb\") pod \"dnsmasq-dns-84b966f6c9-22wt6\" (UID: \"93836cb5-1416-48b2-bd66-984b5a90ee2b\") " pod="openstack/dnsmasq-dns-84b966f6c9-22wt6" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.445744 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/93836cb5-1416-48b2-bd66-984b5a90ee2b-dns-svc\") pod \"dnsmasq-dns-84b966f6c9-22wt6\" (UID: \"93836cb5-1416-48b2-bd66-984b5a90ee2b\") " pod="openstack/dnsmasq-dns-84b966f6c9-22wt6" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.445877 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/93836cb5-1416-48b2-bd66-984b5a90ee2b-ovsdbserver-sb\") pod 
\"dnsmasq-dns-84b966f6c9-22wt6\" (UID: \"93836cb5-1416-48b2-bd66-984b5a90ee2b\") " pod="openstack/dnsmasq-dns-84b966f6c9-22wt6" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.446318 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/93836cb5-1416-48b2-bd66-984b5a90ee2b-dns-swift-storage-0\") pod \"dnsmasq-dns-84b966f6c9-22wt6\" (UID: \"93836cb5-1416-48b2-bd66-984b5a90ee2b\") " pod="openstack/dnsmasq-dns-84b966f6c9-22wt6" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.446624 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93836cb5-1416-48b2-bd66-984b5a90ee2b-config\") pod \"dnsmasq-dns-84b966f6c9-22wt6\" (UID: \"93836cb5-1416-48b2-bd66-984b5a90ee2b\") " pod="openstack/dnsmasq-dns-84b966f6c9-22wt6" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.468924 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-lb2nq" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.471643 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-55bcc7b6cd-xzjdz"] Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.473199 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-55bcc7b6cd-xzjdz" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.476803 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.476967 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-2kjxr" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.477027 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.476996 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.479190 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kgv62\" (UniqueName: \"kubernetes.io/projected/93836cb5-1416-48b2-bd66-984b5a90ee2b-kube-api-access-kgv62\") pod \"dnsmasq-dns-84b966f6c9-22wt6\" (UID: \"93836cb5-1416-48b2-bd66-984b5a90ee2b\") " pod="openstack/dnsmasq-dns-84b966f6c9-22wt6" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.480836 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-55bcc7b6cd-xzjdz"] Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.547029 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/fbb6d8d0-981e-4309-a478-1f07ea12d6c7-config\") pod \"neutron-55bcc7b6cd-xzjdz\" (UID: \"fbb6d8d0-981e-4309-a478-1f07ea12d6c7\") " pod="openstack/neutron-55bcc7b6cd-xzjdz" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.547281 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qnggt\" (UniqueName: \"kubernetes.io/projected/fbb6d8d0-981e-4309-a478-1f07ea12d6c7-kube-api-access-qnggt\") pod \"neutron-55bcc7b6cd-xzjdz\" (UID: \"fbb6d8d0-981e-4309-a478-1f07ea12d6c7\") " pod="openstack/neutron-55bcc7b6cd-xzjdz" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.547384 4755 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fbb6d8d0-981e-4309-a478-1f07ea12d6c7-ovndb-tls-certs\") pod \"neutron-55bcc7b6cd-xzjdz\" (UID: \"fbb6d8d0-981e-4309-a478-1f07ea12d6c7\") " pod="openstack/neutron-55bcc7b6cd-xzjdz" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.547448 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbb6d8d0-981e-4309-a478-1f07ea12d6c7-combined-ca-bundle\") pod \"neutron-55bcc7b6cd-xzjdz\" (UID: \"fbb6d8d0-981e-4309-a478-1f07ea12d6c7\") " pod="openstack/neutron-55bcc7b6cd-xzjdz" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.547545 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/fbb6d8d0-981e-4309-a478-1f07ea12d6c7-httpd-config\") pod \"neutron-55bcc7b6cd-xzjdz\" (UID: \"fbb6d8d0-981e-4309-a478-1f07ea12d6c7\") " pod="openstack/neutron-55bcc7b6cd-xzjdz" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.603331 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84b966f6c9-22wt6" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.649537 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/fbb6d8d0-981e-4309-a478-1f07ea12d6c7-config\") pod \"neutron-55bcc7b6cd-xzjdz\" (UID: \"fbb6d8d0-981e-4309-a478-1f07ea12d6c7\") " pod="openstack/neutron-55bcc7b6cd-xzjdz" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.649690 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qnggt\" (UniqueName: \"kubernetes.io/projected/fbb6d8d0-981e-4309-a478-1f07ea12d6c7-kube-api-access-qnggt\") pod \"neutron-55bcc7b6cd-xzjdz\" (UID: \"fbb6d8d0-981e-4309-a478-1f07ea12d6c7\") " pod="openstack/neutron-55bcc7b6cd-xzjdz" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.649722 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fbb6d8d0-981e-4309-a478-1f07ea12d6c7-ovndb-tls-certs\") pod \"neutron-55bcc7b6cd-xzjdz\" (UID: \"fbb6d8d0-981e-4309-a478-1f07ea12d6c7\") " pod="openstack/neutron-55bcc7b6cd-xzjdz" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.649742 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbb6d8d0-981e-4309-a478-1f07ea12d6c7-combined-ca-bundle\") pod \"neutron-55bcc7b6cd-xzjdz\" (UID: \"fbb6d8d0-981e-4309-a478-1f07ea12d6c7\") " pod="openstack/neutron-55bcc7b6cd-xzjdz" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.650208 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/fbb6d8d0-981e-4309-a478-1f07ea12d6c7-httpd-config\") pod \"neutron-55bcc7b6cd-xzjdz\" (UID: \"fbb6d8d0-981e-4309-a478-1f07ea12d6c7\") " pod="openstack/neutron-55bcc7b6cd-xzjdz" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.653536 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fbb6d8d0-981e-4309-a478-1f07ea12d6c7-ovndb-tls-certs\") pod \"neutron-55bcc7b6cd-xzjdz\" (UID: \"fbb6d8d0-981e-4309-a478-1f07ea12d6c7\") " 
pod="openstack/neutron-55bcc7b6cd-xzjdz" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.653589 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbb6d8d0-981e-4309-a478-1f07ea12d6c7-combined-ca-bundle\") pod \"neutron-55bcc7b6cd-xzjdz\" (UID: \"fbb6d8d0-981e-4309-a478-1f07ea12d6c7\") " pod="openstack/neutron-55bcc7b6cd-xzjdz" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.655311 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/fbb6d8d0-981e-4309-a478-1f07ea12d6c7-httpd-config\") pod \"neutron-55bcc7b6cd-xzjdz\" (UID: \"fbb6d8d0-981e-4309-a478-1f07ea12d6c7\") " pod="openstack/neutron-55bcc7b6cd-xzjdz" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.656253 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/fbb6d8d0-981e-4309-a478-1f07ea12d6c7-config\") pod \"neutron-55bcc7b6cd-xzjdz\" (UID: \"fbb6d8d0-981e-4309-a478-1f07ea12d6c7\") " pod="openstack/neutron-55bcc7b6cd-xzjdz" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.667192 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qnggt\" (UniqueName: \"kubernetes.io/projected/fbb6d8d0-981e-4309-a478-1f07ea12d6c7-kube-api-access-qnggt\") pod \"neutron-55bcc7b6cd-xzjdz\" (UID: \"fbb6d8d0-981e-4309-a478-1f07ea12d6c7\") " pod="openstack/neutron-55bcc7b6cd-xzjdz" Nov 24 01:28:59 crc kubenswrapper[4755]: I1124 01:28:59.835958 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-55bcc7b6cd-xzjdz" Nov 24 01:28:59 crc kubenswrapper[4755]: E1124 01:28:59.992495 4755 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Nov 24 01:28:59 crc kubenswrapper[4755]: E1124 01:28:59.992713 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pdb5j,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-rhwbp_openstack(aea62103-9b85-495d-bb71-3c69c02a3000): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 24 01:28:59 crc kubenswrapper[4755]: E1124 01:28:59.993964 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-rhwbp" podUID="aea62103-9b85-495d-bb71-3c69c02a3000" Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.011073 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6354cb83-933d-4b3f-b922-ce9e4f94e123" path="/var/lib/kubelet/pods/6354cb83-933d-4b3f-b922-ce9e4f94e123/volumes" Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.012254 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9005ec18-506f-4b7c-a06c-7d4d619e3732" path="/var/lib/kubelet/pods/9005ec18-506f-4b7c-a06c-7d4d619e3732/volumes" Nov 24 01:29:00 crc kubenswrapper[4755]: E1124 01:29:00.330791 4755 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified" Nov 24 01:29:00 crc 
kubenswrapper[4755]: E1124 01:29:00.330942 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ceilometer-central-agent,Image:quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n8bh547h5dbh686h54fhbch66h584hcfh567h5fbh669h64fh65h659h675h64fh686h57fh6hd4h644h5c4h64h4h65dh8dhc8h64ch549h58h569q,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:ceilometer-central-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-g2srq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/python3 /var/lib/openstack/bin/centralhealth.py],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(a7e96f37-574f-4900-88f9-33dc41179807): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 24 01:29:00 crc kubenswrapper[4755]: W1124 01:29:00.348375 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5d176bdd_fe2f_4ed0_a930_2a6ae568b400.slice/crio-1d08843e6bf07f768c18e3187c49250f568ed1f802dff94c5096131ab41cbc11 WatchSource:0}: Error finding container 1d08843e6bf07f768c18e3187c49250f568ed1f802dff94c5096131ab41cbc11: Status 404 returned error can't find the container with id 1d08843e6bf07f768c18e3187c49250f568ed1f802dff94c5096131ab41cbc11 Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.376764 4755 scope.go:117] "RemoveContainer" containerID="a2bc8ade0a949c897b8a2530874abd9ae603314a73e8d00230635749e0da94d4" Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.546360 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5f5b8cffb5-6gsfn" Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.564690 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-x9ksn" Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.567872 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/3b688652-b82e-48df-8ffd-8d8234672564-horizon-secret-key\") pod \"3b688652-b82e-48df-8ffd-8d8234672564\" (UID: \"3b688652-b82e-48df-8ffd-8d8234672564\") " Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.567909 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3b688652-b82e-48df-8ffd-8d8234672564-scripts\") pod \"3b688652-b82e-48df-8ffd-8d8234672564\" (UID: \"3b688652-b82e-48df-8ffd-8d8234672564\") " Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.567931 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/46a31b0c-c17e-469f-823e-f56504308b2c-dns-swift-storage-0\") pod \"46a31b0c-c17e-469f-823e-f56504308b2c\" (UID: \"46a31b0c-c17e-469f-823e-f56504308b2c\") " Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.567958 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46a31b0c-c17e-469f-823e-f56504308b2c-config\") pod \"46a31b0c-c17e-469f-823e-f56504308b2c\" (UID: \"46a31b0c-c17e-469f-823e-f56504308b2c\") " Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.567979 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/46a31b0c-c17e-469f-823e-f56504308b2c-ovsdbserver-nb\") pod \"46a31b0c-c17e-469f-823e-f56504308b2c\" (UID: \"46a31b0c-c17e-469f-823e-f56504308b2c\") " Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.568017 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tnthb\" (UniqueName: \"kubernetes.io/projected/46a31b0c-c17e-469f-823e-f56504308b2c-kube-api-access-tnthb\") pod \"46a31b0c-c17e-469f-823e-f56504308b2c\" (UID: \"46a31b0c-c17e-469f-823e-f56504308b2c\") " Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.568051 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3b688652-b82e-48df-8ffd-8d8234672564-config-data\") pod \"3b688652-b82e-48df-8ffd-8d8234672564\" (UID: \"3b688652-b82e-48df-8ffd-8d8234672564\") " Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.568086 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5l2ss\" (UniqueName: \"kubernetes.io/projected/3b688652-b82e-48df-8ffd-8d8234672564-kube-api-access-5l2ss\") pod \"3b688652-b82e-48df-8ffd-8d8234672564\" (UID: \"3b688652-b82e-48df-8ffd-8d8234672564\") " Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.568132 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3b688652-b82e-48df-8ffd-8d8234672564-logs\") pod \"3b688652-b82e-48df-8ffd-8d8234672564\" (UID: \"3b688652-b82e-48df-8ffd-8d8234672564\") " Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.568174 4755 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/46a31b0c-c17e-469f-823e-f56504308b2c-dns-svc\") pod \"46a31b0c-c17e-469f-823e-f56504308b2c\" (UID: \"46a31b0c-c17e-469f-823e-f56504308b2c\") " Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.568194 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/46a31b0c-c17e-469f-823e-f56504308b2c-ovsdbserver-sb\") pod \"46a31b0c-c17e-469f-823e-f56504308b2c\" (UID: \"46a31b0c-c17e-469f-823e-f56504308b2c\") " Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.573925 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3b688652-b82e-48df-8ffd-8d8234672564-scripts" (OuterVolumeSpecName: "scripts") pod "3b688652-b82e-48df-8ffd-8d8234672564" (UID: "3b688652-b82e-48df-8ffd-8d8234672564"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.576100 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3b688652-b82e-48df-8ffd-8d8234672564-config-data" (OuterVolumeSpecName: "config-data") pod "3b688652-b82e-48df-8ffd-8d8234672564" (UID: "3b688652-b82e-48df-8ffd-8d8234672564"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.577796 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3b688652-b82e-48df-8ffd-8d8234672564-logs" (OuterVolumeSpecName: "logs") pod "3b688652-b82e-48df-8ffd-8d8234672564" (UID: "3b688652-b82e-48df-8ffd-8d8234672564"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.589743 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46a31b0c-c17e-469f-823e-f56504308b2c-kube-api-access-tnthb" (OuterVolumeSpecName: "kube-api-access-tnthb") pod "46a31b0c-c17e-469f-823e-f56504308b2c" (UID: "46a31b0c-c17e-469f-823e-f56504308b2c"). InnerVolumeSpecName "kube-api-access-tnthb". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.612583 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-668d8db6fc-jzr8v" Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.618993 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b688652-b82e-48df-8ffd-8d8234672564-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "3b688652-b82e-48df-8ffd-8d8234672564" (UID: "3b688652-b82e-48df-8ffd-8d8234672564"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.630472 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b688652-b82e-48df-8ffd-8d8234672564-kube-api-access-5l2ss" (OuterVolumeSpecName: "kube-api-access-5l2ss") pod "3b688652-b82e-48df-8ffd-8d8234672564" (UID: "3b688652-b82e-48df-8ffd-8d8234672564"). InnerVolumeSpecName "kube-api-access-5l2ss". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.632916 4755 scope.go:117] "RemoveContainer" containerID="517bae0a34d2fb10068c1debf56f959650efc3d5ea96b40fb33be7a8f4550371" Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.633032 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-75d8fb7cd4-vbxkn" event={"ID":"5d176bdd-fe2f-4ed0-a930-2a6ae568b400","Type":"ContainerStarted","Data":"1d08843e6bf07f768c18e3187c49250f568ed1f802dff94c5096131ab41cbc11"} Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.689100 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-668d8db6fc-jzr8v" event={"ID":"7b4dc449-172f-46cf-9e94-33827628c742","Type":"ContainerDied","Data":"dea93c2bd2d9c4f2b6e96e523f13faa6759543d4764001c36dd63226858a3469"} Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.689215 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-668d8db6fc-jzr8v" Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.689104 4755 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/3b688652-b82e-48df-8ffd-8d8234672564-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.694580 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46a31b0c-c17e-469f-823e-f56504308b2c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "46a31b0c-c17e-469f-823e-f56504308b2c" (UID: "46a31b0c-c17e-469f-823e-f56504308b2c"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.694796 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46a31b0c-c17e-469f-823e-f56504308b2c-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "46a31b0c-c17e-469f-823e-f56504308b2c" (UID: "46a31b0c-c17e-469f-823e-f56504308b2c"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.697473 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-x9ksn" event={"ID":"46a31b0c-c17e-469f-823e-f56504308b2c","Type":"ContainerDied","Data":"736bd529d6daea5fd8d750db531e41f333947b1f93f84a59970a22a96b87c610"} Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.697588 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-x9ksn" Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.701170 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5f5b8cffb5-6gsfn" Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.701303 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5f5b8cffb5-6gsfn" event={"ID":"3b688652-b82e-48df-8ffd-8d8234672564","Type":"ContainerDied","Data":"12aa89ac1c1bd2cbf994621b93524156943ccead16db9f7ba612973f1f8cd120"} Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.705476 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3b688652-b82e-48df-8ffd-8d8234672564-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.706907 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tnthb\" (UniqueName: \"kubernetes.io/projected/46a31b0c-c17e-469f-823e-f56504308b2c-kube-api-access-tnthb\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.708444 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3b688652-b82e-48df-8ffd-8d8234672564-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.708468 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5l2ss\" (UniqueName: \"kubernetes.io/projected/3b688652-b82e-48df-8ffd-8d8234672564-kube-api-access-5l2ss\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.708477 4755 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3b688652-b82e-48df-8ffd-8d8234672564-logs\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.709130 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46a31b0c-c17e-469f-823e-f56504308b2c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "46a31b0c-c17e-469f-823e-f56504308b2c" (UID: "46a31b0c-c17e-469f-823e-f56504308b2c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:29:00 crc kubenswrapper[4755]: E1124 01:29:00.713280 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-rhwbp" podUID="aea62103-9b85-495d-bb71-3c69c02a3000" Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.729489 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46a31b0c-c17e-469f-823e-f56504308b2c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "46a31b0c-c17e-469f-823e-f56504308b2c" (UID: "46a31b0c-c17e-469f-823e-f56504308b2c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.736847 4755 scope.go:117] "RemoveContainer" containerID="29634fe605bd85d02d49022c7d12a34c46e0e4db749363cdbab033f911221084" Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.748028 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46a31b0c-c17e-469f-823e-f56504308b2c-config" (OuterVolumeSpecName: "config") pod "46a31b0c-c17e-469f-823e-f56504308b2c" (UID: "46a31b0c-c17e-469f-823e-f56504308b2c"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.816809 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7b4dc449-172f-46cf-9e94-33827628c742-config-data\") pod \"7b4dc449-172f-46cf-9e94-33827628c742\" (UID: \"7b4dc449-172f-46cf-9e94-33827628c742\") " Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.816868 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7b4dc449-172f-46cf-9e94-33827628c742-scripts\") pod \"7b4dc449-172f-46cf-9e94-33827628c742\" (UID: \"7b4dc449-172f-46cf-9e94-33827628c742\") " Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.816896 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b4dc449-172f-46cf-9e94-33827628c742-logs\") pod \"7b4dc449-172f-46cf-9e94-33827628c742\" (UID: \"7b4dc449-172f-46cf-9e94-33827628c742\") " Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.816913 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/7b4dc449-172f-46cf-9e94-33827628c742-horizon-secret-key\") pod \"7b4dc449-172f-46cf-9e94-33827628c742\" (UID: \"7b4dc449-172f-46cf-9e94-33827628c742\") " Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.816985 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h976n\" (UniqueName: \"kubernetes.io/projected/7b4dc449-172f-46cf-9e94-33827628c742-kube-api-access-h976n\") pod \"7b4dc449-172f-46cf-9e94-33827628c742\" (UID: \"7b4dc449-172f-46cf-9e94-33827628c742\") " Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.817279 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b4dc449-172f-46cf-9e94-33827628c742-logs" (OuterVolumeSpecName: "logs") pod "7b4dc449-172f-46cf-9e94-33827628c742" (UID: "7b4dc449-172f-46cf-9e94-33827628c742"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.817532 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b4dc449-172f-46cf-9e94-33827628c742-config-data" (OuterVolumeSpecName: "config-data") pod "7b4dc449-172f-46cf-9e94-33827628c742" (UID: "7b4dc449-172f-46cf-9e94-33827628c742"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.817894 4755 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/46a31b0c-c17e-469f-823e-f56504308b2c-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.817919 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/46a31b0c-c17e-469f-823e-f56504308b2c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.817931 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7b4dc449-172f-46cf-9e94-33827628c742-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.817953 4755 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b4dc449-172f-46cf-9e94-33827628c742-logs\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.817968 4755 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/46a31b0c-c17e-469f-823e-f56504308b2c-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.817979 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46a31b0c-c17e-469f-823e-f56504308b2c-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.817988 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/46a31b0c-c17e-469f-823e-f56504308b2c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.818483 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b4dc449-172f-46cf-9e94-33827628c742-scripts" (OuterVolumeSpecName: "scripts") pod "7b4dc449-172f-46cf-9e94-33827628c742" (UID: "7b4dc449-172f-46cf-9e94-33827628c742"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.827160 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b4dc449-172f-46cf-9e94-33827628c742-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "7b4dc449-172f-46cf-9e94-33827628c742" (UID: "7b4dc449-172f-46cf-9e94-33827628c742"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.830439 4755 scope.go:117] "RemoveContainer" containerID="9383f126143afb21db44e9f74b6428ff36c029f2dce010a7f7a7cc6953b6afdd" Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.830909 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b4dc449-172f-46cf-9e94-33827628c742-kube-api-access-h976n" (OuterVolumeSpecName: "kube-api-access-h976n") pod "7b4dc449-172f-46cf-9e94-33827628c742" (UID: "7b4dc449-172f-46cf-9e94-33827628c742"). InnerVolumeSpecName "kube-api-access-h976n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.862675 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5f5b8cffb5-6gsfn"] Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.869013 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-5f5b8cffb5-6gsfn"] Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.919341 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7b4dc449-172f-46cf-9e94-33827628c742-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.919370 4755 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/7b4dc449-172f-46cf-9e94-33827628c742-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.919380 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h976n\" (UniqueName: \"kubernetes.io/projected/7b4dc449-172f-46cf-9e94-33827628c742-kube-api-access-h976n\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:00 crc kubenswrapper[4755]: I1124 01:29:00.936838 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-55cf755d8-2cns2"] Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.057539 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-668d8db6fc-jzr8v"] Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.077895 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-668d8db6fc-jzr8v"] Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.084873 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-x9ksn"] Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.092023 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-x9ksn"] Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.200893 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-22wt6"] Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.260394 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-lb2nq"] Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.401340 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.486623 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-55bcc7b6cd-xzjdz"] Nov 24 01:29:01 crc kubenswrapper[4755]: W1124 01:29:01.493691 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfbb6d8d0_981e_4309_a478_1f07ea12d6c7.slice/crio-16fac4ee25e32ce5a804add26328e2bbae92e23afd23196bca5e2ec4c1cfaca5 WatchSource:0}: Error finding container 16fac4ee25e32ce5a804add26328e2bbae92e23afd23196bca5e2ec4c1cfaca5: Status 404 returned error can't find the container with id 16fac4ee25e32ce5a804add26328e2bbae92e23afd23196bca5e2ec4c1cfaca5 Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.644315 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-765f9bdf9-sx8ch"] Nov 24 01:29:01 crc kubenswrapper[4755]: E1124 01:29:01.649790 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46a31b0c-c17e-469f-823e-f56504308b2c" containerName="dnsmasq-dns" Nov 24 01:29:01 
crc kubenswrapper[4755]: I1124 01:29:01.650110 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="46a31b0c-c17e-469f-823e-f56504308b2c" containerName="dnsmasq-dns" Nov 24 01:29:01 crc kubenswrapper[4755]: E1124 01:29:01.650162 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46a31b0c-c17e-469f-823e-f56504308b2c" containerName="init" Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.650208 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="46a31b0c-c17e-469f-823e-f56504308b2c" containerName="init" Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.650451 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="46a31b0c-c17e-469f-823e-f56504308b2c" containerName="dnsmasq-dns" Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.657991 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-765f9bdf9-sx8ch" Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.661805 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-765f9bdf9-sx8ch"] Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.662747 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.662778 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.732722 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05c3cedf-f9a5-453f-a879-fea1939c9f87-combined-ca-bundle\") pod \"neutron-765f9bdf9-sx8ch\" (UID: \"05c3cedf-f9a5-453f-a879-fea1939c9f87\") " pod="openstack/neutron-765f9bdf9-sx8ch" Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.732813 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/05c3cedf-f9a5-453f-a879-fea1939c9f87-ovndb-tls-certs\") pod \"neutron-765f9bdf9-sx8ch\" (UID: \"05c3cedf-f9a5-453f-a879-fea1939c9f87\") " pod="openstack/neutron-765f9bdf9-sx8ch" Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.732855 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w4hgv\" (UniqueName: \"kubernetes.io/projected/05c3cedf-f9a5-453f-a879-fea1939c9f87-kube-api-access-w4hgv\") pod \"neutron-765f9bdf9-sx8ch\" (UID: \"05c3cedf-f9a5-453f-a879-fea1939c9f87\") " pod="openstack/neutron-765f9bdf9-sx8ch" Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.732908 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/05c3cedf-f9a5-453f-a879-fea1939c9f87-config\") pod \"neutron-765f9bdf9-sx8ch\" (UID: \"05c3cedf-f9a5-453f-a879-fea1939c9f87\") " pod="openstack/neutron-765f9bdf9-sx8ch" Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.732931 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/05c3cedf-f9a5-453f-a879-fea1939c9f87-public-tls-certs\") pod \"neutron-765f9bdf9-sx8ch\" (UID: \"05c3cedf-f9a5-453f-a879-fea1939c9f87\") " pod="openstack/neutron-765f9bdf9-sx8ch" Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.732956 4755 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/05c3cedf-f9a5-453f-a879-fea1939c9f87-httpd-config\") pod \"neutron-765f9bdf9-sx8ch\" (UID: \"05c3cedf-f9a5-453f-a879-fea1939c9f87\") " pod="openstack/neutron-765f9bdf9-sx8ch" Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.732979 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/05c3cedf-f9a5-453f-a879-fea1939c9f87-internal-tls-certs\") pod \"neutron-765f9bdf9-sx8ch\" (UID: \"05c3cedf-f9a5-453f-a879-fea1939c9f87\") " pod="openstack/neutron-765f9bdf9-sx8ch" Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.752012 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-75d8fb7cd4-vbxkn" event={"ID":"5d176bdd-fe2f-4ed0-a930-2a6ae568b400","Type":"ContainerStarted","Data":"77f34137678892ca9247192bbd83cc742be7361f9662b393f86c293d2ea2bd40"} Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.752092 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-75d8fb7cd4-vbxkn" event={"ID":"5d176bdd-fe2f-4ed0-a930-2a6ae568b400","Type":"ContainerStarted","Data":"4cfffba567caca1a98570bd62c26ef153127d64d93ac4fd529df97c6db2cc3af"} Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.764401 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0d5b1659-28dc-49a7-9b79-e04b9e30f0f6","Type":"ContainerStarted","Data":"3114941a5a679c5158cabefa84ed73aae938e86d32678e80ce08d1b84c53a0e9"} Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.765934 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-srjbc" event={"ID":"2bed5952-1a88-4314-befd-bb76c5431cdd","Type":"ContainerStarted","Data":"08bbc10523221ffa949c537018dab1ee193ae53e6d68b8d22ed837852488c548"} Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.772247 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-22wt6" event={"ID":"93836cb5-1416-48b2-bd66-984b5a90ee2b","Type":"ContainerStarted","Data":"ac9808c5c68d5327431494792d1f443928f39ddfa756808d434b4e93bb6f5de8"} Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.785889 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-75d8fb7cd4-vbxkn" podStartSLOduration=23.307820993 podStartE2EDuration="23.785870805s" podCreationTimestamp="2025-11-24 01:28:38 +0000 UTC" firstStartedPulling="2025-11-24 01:29:00.359780297 +0000 UTC m=+965.045845828" lastFinishedPulling="2025-11-24 01:29:00.837830139 +0000 UTC m=+965.523895640" observedRunningTime="2025-11-24 01:29:01.773697823 +0000 UTC m=+966.459763324" watchObservedRunningTime="2025-11-24 01:29:01.785870805 +0000 UTC m=+966.471936316" Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.788994 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-tgz4r" event={"ID":"ec11ae96-46e1-47a2-ae19-61941253ce7c","Type":"ContainerStarted","Data":"18cd368a49b23980abf3dddf8124917b071d5644cb9750961e3bf44d114122f5"} Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.800536 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-srjbc" podStartSLOduration=3.56083705 podStartE2EDuration="33.800520358s" podCreationTimestamp="2025-11-24 01:28:28 +0000 UTC" firstStartedPulling="2025-11-24 01:28:30.13718901 +0000 UTC 
m=+934.823254511" lastFinishedPulling="2025-11-24 01:29:00.376872308 +0000 UTC m=+965.062937819" observedRunningTime="2025-11-24 01:29:01.788723646 +0000 UTC m=+966.474789137" watchObservedRunningTime="2025-11-24 01:29:01.800520358 +0000 UTC m=+966.486585859" Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.813484 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-55cf755d8-2cns2" event={"ID":"dc7447cb-d4c3-48d1-8cd3-065d5eee21cb","Type":"ContainerStarted","Data":"4beef73f15aa24bc366a07ee6b65668580a078af7defa6b5c6d228dcbe01790b"} Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.813525 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-55cf755d8-2cns2" event={"ID":"dc7447cb-d4c3-48d1-8cd3-065d5eee21cb","Type":"ContainerStarted","Data":"1d442e32d84d4e2cff3318b61b7cba05d68456587c4ceec4d872c6c06fa3bf99"} Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.813534 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-55cf755d8-2cns2" event={"ID":"dc7447cb-d4c3-48d1-8cd3-065d5eee21cb","Type":"ContainerStarted","Data":"5e7e4e1884508e5dd72078cfd6c96f499ccf8a4efb96a0bdd0c0915bed4596bb"} Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.823376 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-55bcc7b6cd-xzjdz" event={"ID":"fbb6d8d0-981e-4309-a478-1f07ea12d6c7","Type":"ContainerStarted","Data":"16fac4ee25e32ce5a804add26328e2bbae92e23afd23196bca5e2ec4c1cfaca5"} Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.825648 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-tgz4r" podStartSLOduration=3.164910578 podStartE2EDuration="33.825639764s" podCreationTimestamp="2025-11-24 01:28:28 +0000 UTC" firstStartedPulling="2025-11-24 01:28:29.778434885 +0000 UTC m=+934.464500386" lastFinishedPulling="2025-11-24 01:29:00.439164061 +0000 UTC m=+965.125229572" observedRunningTime="2025-11-24 01:29:01.821955001 +0000 UTC m=+966.508020502" watchObservedRunningTime="2025-11-24 01:29:01.825639764 +0000 UTC m=+966.511705265" Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.836280 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/05c3cedf-f9a5-453f-a879-fea1939c9f87-config\") pod \"neutron-765f9bdf9-sx8ch\" (UID: \"05c3cedf-f9a5-453f-a879-fea1939c9f87\") " pod="openstack/neutron-765f9bdf9-sx8ch" Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.836316 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/05c3cedf-f9a5-453f-a879-fea1939c9f87-public-tls-certs\") pod \"neutron-765f9bdf9-sx8ch\" (UID: \"05c3cedf-f9a5-453f-a879-fea1939c9f87\") " pod="openstack/neutron-765f9bdf9-sx8ch" Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.836343 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/05c3cedf-f9a5-453f-a879-fea1939c9f87-httpd-config\") pod \"neutron-765f9bdf9-sx8ch\" (UID: \"05c3cedf-f9a5-453f-a879-fea1939c9f87\") " pod="openstack/neutron-765f9bdf9-sx8ch" Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.836361 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/05c3cedf-f9a5-453f-a879-fea1939c9f87-internal-tls-certs\") pod \"neutron-765f9bdf9-sx8ch\" (UID: 
\"05c3cedf-f9a5-453f-a879-fea1939c9f87\") " pod="openstack/neutron-765f9bdf9-sx8ch" Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.836488 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05c3cedf-f9a5-453f-a879-fea1939c9f87-combined-ca-bundle\") pod \"neutron-765f9bdf9-sx8ch\" (UID: \"05c3cedf-f9a5-453f-a879-fea1939c9f87\") " pod="openstack/neutron-765f9bdf9-sx8ch" Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.836582 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/05c3cedf-f9a5-453f-a879-fea1939c9f87-ovndb-tls-certs\") pod \"neutron-765f9bdf9-sx8ch\" (UID: \"05c3cedf-f9a5-453f-a879-fea1939c9f87\") " pod="openstack/neutron-765f9bdf9-sx8ch" Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.836653 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w4hgv\" (UniqueName: \"kubernetes.io/projected/05c3cedf-f9a5-453f-a879-fea1939c9f87-kube-api-access-w4hgv\") pod \"neutron-765f9bdf9-sx8ch\" (UID: \"05c3cedf-f9a5-453f-a879-fea1939c9f87\") " pod="openstack/neutron-765f9bdf9-sx8ch" Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.845423 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-lb2nq" event={"ID":"ebd8073d-4cda-4515-8999-cfdb8a4fcc7f","Type":"ContainerStarted","Data":"b0287636524bb6c59e8698d1f413543a4eedd4cdfd2da599eff96b5e768304dc"} Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.850014 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/05c3cedf-f9a5-453f-a879-fea1939c9f87-httpd-config\") pod \"neutron-765f9bdf9-sx8ch\" (UID: \"05c3cedf-f9a5-453f-a879-fea1939c9f87\") " pod="openstack/neutron-765f9bdf9-sx8ch" Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.851121 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-55cf755d8-2cns2" podStartSLOduration=24.851102681 podStartE2EDuration="24.851102681s" podCreationTimestamp="2025-11-24 01:28:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:29:01.842415176 +0000 UTC m=+966.528480687" watchObservedRunningTime="2025-11-24 01:29:01.851102681 +0000 UTC m=+966.537168192" Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.854565 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/05c3cedf-f9a5-453f-a879-fea1939c9f87-public-tls-certs\") pod \"neutron-765f9bdf9-sx8ch\" (UID: \"05c3cedf-f9a5-453f-a879-fea1939c9f87\") " pod="openstack/neutron-765f9bdf9-sx8ch" Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.855085 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/05c3cedf-f9a5-453f-a879-fea1939c9f87-config\") pod \"neutron-765f9bdf9-sx8ch\" (UID: \"05c3cedf-f9a5-453f-a879-fea1939c9f87\") " pod="openstack/neutron-765f9bdf9-sx8ch" Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.855087 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/05c3cedf-f9a5-453f-a879-fea1939c9f87-ovndb-tls-certs\") pod \"neutron-765f9bdf9-sx8ch\" (UID: \"05c3cedf-f9a5-453f-a879-fea1939c9f87\") " 
pod="openstack/neutron-765f9bdf9-sx8ch" Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.857272 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/05c3cedf-f9a5-453f-a879-fea1939c9f87-internal-tls-certs\") pod \"neutron-765f9bdf9-sx8ch\" (UID: \"05c3cedf-f9a5-453f-a879-fea1939c9f87\") " pod="openstack/neutron-765f9bdf9-sx8ch" Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.857902 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05c3cedf-f9a5-453f-a879-fea1939c9f87-combined-ca-bundle\") pod \"neutron-765f9bdf9-sx8ch\" (UID: \"05c3cedf-f9a5-453f-a879-fea1939c9f87\") " pod="openstack/neutron-765f9bdf9-sx8ch" Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.859631 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w4hgv\" (UniqueName: \"kubernetes.io/projected/05c3cedf-f9a5-453f-a879-fea1939c9f87-kube-api-access-w4hgv\") pod \"neutron-765f9bdf9-sx8ch\" (UID: \"05c3cedf-f9a5-453f-a879-fea1939c9f87\") " pod="openstack/neutron-765f9bdf9-sx8ch" Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.883742 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-lb2nq" podStartSLOduration=2.883716999 podStartE2EDuration="2.883716999s" podCreationTimestamp="2025-11-24 01:28:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:29:01.8827015 +0000 UTC m=+966.568767011" watchObservedRunningTime="2025-11-24 01:29:01.883716999 +0000 UTC m=+966.569782490" Nov 24 01:29:01 crc kubenswrapper[4755]: I1124 01:29:01.995193 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 24 01:29:02 crc kubenswrapper[4755]: I1124 01:29:02.011887 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-765f9bdf9-sx8ch" Nov 24 01:29:02 crc kubenswrapper[4755]: I1124 01:29:02.021725 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3b688652-b82e-48df-8ffd-8d8234672564" path="/var/lib/kubelet/pods/3b688652-b82e-48df-8ffd-8d8234672564/volumes" Nov 24 01:29:02 crc kubenswrapper[4755]: I1124 01:29:02.022103 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="46a31b0c-c17e-469f-823e-f56504308b2c" path="/var/lib/kubelet/pods/46a31b0c-c17e-469f-823e-f56504308b2c/volumes" Nov 24 01:29:02 crc kubenswrapper[4755]: I1124 01:29:02.022869 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b4dc449-172f-46cf-9e94-33827628c742" path="/var/lib/kubelet/pods/7b4dc449-172f-46cf-9e94-33827628c742/volumes" Nov 24 01:29:02 crc kubenswrapper[4755]: I1124 01:29:02.863302 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"a10198c5-a145-4df1-a99d-14463ff5d048","Type":"ContainerStarted","Data":"08d45d502b608d93ea437a15a21062307d4b751bbc64ca14cc07d87bdd0d2352"} Nov 24 01:29:02 crc kubenswrapper[4755]: I1124 01:29:02.864894 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-lb2nq" event={"ID":"ebd8073d-4cda-4515-8999-cfdb8a4fcc7f","Type":"ContainerStarted","Data":"eff9cc0342e0869618d644a3b0b2d906ff00fdc000c45039d9e76333aadbcc40"} Nov 24 01:29:02 crc kubenswrapper[4755]: I1124 01:29:02.869508 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0d5b1659-28dc-49a7-9b79-e04b9e30f0f6","Type":"ContainerStarted","Data":"77b5cc9d31c3d1621de26fb42112358d0e9e5730c5093b86ffb25f07b3fc67f6"} Nov 24 01:29:02 crc kubenswrapper[4755]: I1124 01:29:02.870752 4755 generic.go:334] "Generic (PLEG): container finished" podID="93836cb5-1416-48b2-bd66-984b5a90ee2b" containerID="266803f7449613c7fa6df1c32c9f0de6005c35199bc0448325b374b77e2551e5" exitCode=0 Nov 24 01:29:02 crc kubenswrapper[4755]: I1124 01:29:02.870786 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-22wt6" event={"ID":"93836cb5-1416-48b2-bd66-984b5a90ee2b","Type":"ContainerDied","Data":"266803f7449613c7fa6df1c32c9f0de6005c35199bc0448325b374b77e2551e5"} Nov 24 01:29:02 crc kubenswrapper[4755]: I1124 01:29:02.872389 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-55bcc7b6cd-xzjdz" event={"ID":"fbb6d8d0-981e-4309-a478-1f07ea12d6c7","Type":"ContainerStarted","Data":"bcf0488860390ca50c5aa5b2512ecdca965b8aa8f5e6379eb739b2408cb67c84"} Nov 24 01:29:03 crc kubenswrapper[4755]: I1124 01:29:03.268320 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-765f9bdf9-sx8ch"] Nov 24 01:29:03 crc kubenswrapper[4755]: I1124 01:29:03.296813 4755 patch_prober.go:28] interesting pod/machine-config-daemon-h8xzm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 01:29:03 crc kubenswrapper[4755]: I1124 01:29:03.296896 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 
01:29:03 crc kubenswrapper[4755]: I1124 01:29:03.296948 4755 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" Nov 24 01:29:03 crc kubenswrapper[4755]: I1124 01:29:03.297957 4755 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d1576ec75c38e5c634d28cddad8ee45995a487ee45005883f5a41207a6c2c9de"} pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 24 01:29:03 crc kubenswrapper[4755]: I1124 01:29:03.298020 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" containerID="cri-o://d1576ec75c38e5c634d28cddad8ee45995a487ee45005883f5a41207a6c2c9de" gracePeriod=600 Nov 24 01:29:03 crc kubenswrapper[4755]: I1124 01:29:03.892294 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-22wt6" event={"ID":"93836cb5-1416-48b2-bd66-984b5a90ee2b","Type":"ContainerStarted","Data":"beca6bf68635c9537a4511d9043983ed5b86ead16de53b5e27fd39917c3838dd"} Nov 24 01:29:03 crc kubenswrapper[4755]: I1124 01:29:03.894533 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-84b966f6c9-22wt6" Nov 24 01:29:03 crc kubenswrapper[4755]: I1124 01:29:03.905325 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-765f9bdf9-sx8ch" event={"ID":"05c3cedf-f9a5-453f-a879-fea1939c9f87","Type":"ContainerStarted","Data":"8aa132564696b57d5e3c7fe0d684f31170a0469a2fef638485eee6a8f2731d94"} Nov 24 01:29:03 crc kubenswrapper[4755]: I1124 01:29:03.918199 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-84b966f6c9-22wt6" podStartSLOduration=4.918168214 podStartE2EDuration="4.918168214s" podCreationTimestamp="2025-11-24 01:28:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:29:03.911395494 +0000 UTC m=+968.597461005" watchObservedRunningTime="2025-11-24 01:29:03.918168214 +0000 UTC m=+968.604233755" Nov 24 01:29:03 crc kubenswrapper[4755]: I1124 01:29:03.918868 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7e96f37-574f-4900-88f9-33dc41179807","Type":"ContainerStarted","Data":"f9e6e8f3d6be6e25c707568e1e71cb08d1ef2b9bc56285251a356b84cfbe3a19"} Nov 24 01:29:03 crc kubenswrapper[4755]: I1124 01:29:03.925639 4755 generic.go:334] "Generic (PLEG): container finished" podID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerID="d1576ec75c38e5c634d28cddad8ee45995a487ee45005883f5a41207a6c2c9de" exitCode=0 Nov 24 01:29:03 crc kubenswrapper[4755]: I1124 01:29:03.925730 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" event={"ID":"b1962128-02a0-46c3-82c2-5055c2aed0b9","Type":"ContainerDied","Data":"d1576ec75c38e5c634d28cddad8ee45995a487ee45005883f5a41207a6c2c9de"} Nov 24 01:29:03 crc kubenswrapper[4755]: I1124 01:29:03.925768 4755 scope.go:117] "RemoveContainer" containerID="31c763569028cdcbeab7620c7ace03dd90f3c86c98eb54fc2ca5ba33d792fb99" Nov 24 01:29:03 crc kubenswrapper[4755]: I1124 01:29:03.929980 4755 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/neutron-55bcc7b6cd-xzjdz" event={"ID":"fbb6d8d0-981e-4309-a478-1f07ea12d6c7","Type":"ContainerStarted","Data":"bc1a0c840395721f8fa147c4be20748da99022eabfad9c5a5875326fc32d6e21"} Nov 24 01:29:03 crc kubenswrapper[4755]: I1124 01:29:03.943921 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-55bcc7b6cd-xzjdz" Nov 24 01:29:03 crc kubenswrapper[4755]: I1124 01:29:03.966157 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-55bcc7b6cd-xzjdz" podStartSLOduration=4.966138524 podStartE2EDuration="4.966138524s" podCreationTimestamp="2025-11-24 01:28:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:29:03.96386079 +0000 UTC m=+968.649926291" watchObservedRunningTime="2025-11-24 01:29:03.966138524 +0000 UTC m=+968.652204025" Nov 24 01:29:04 crc kubenswrapper[4755]: I1124 01:29:04.963407 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"a10198c5-a145-4df1-a99d-14463ff5d048","Type":"ContainerStarted","Data":"4d843b7361a4e224478832ce3c5543fa05a489c9a8224647d78d6682f80906d0"} Nov 24 01:29:04 crc kubenswrapper[4755]: I1124 01:29:04.963862 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"a10198c5-a145-4df1-a99d-14463ff5d048","Type":"ContainerStarted","Data":"87d2ffcd285ff3e2495102203a6f65d2a346cefbab759e72ba2700e355a221a8"} Nov 24 01:29:04 crc kubenswrapper[4755]: I1124 01:29:04.965941 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0d5b1659-28dc-49a7-9b79-e04b9e30f0f6","Type":"ContainerStarted","Data":"3437f900209c8262b3253caf386661d184e82f24bd4db4c41fb76179ac5b147a"} Nov 24 01:29:04 crc kubenswrapper[4755]: I1124 01:29:04.967635 4755 generic.go:334] "Generic (PLEG): container finished" podID="ec11ae96-46e1-47a2-ae19-61941253ce7c" containerID="18cd368a49b23980abf3dddf8124917b071d5644cb9750961e3bf44d114122f5" exitCode=0 Nov 24 01:29:04 crc kubenswrapper[4755]: I1124 01:29:04.967710 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-tgz4r" event={"ID":"ec11ae96-46e1-47a2-ae19-61941253ce7c","Type":"ContainerDied","Data":"18cd368a49b23980abf3dddf8124917b071d5644cb9750961e3bf44d114122f5"} Nov 24 01:29:04 crc kubenswrapper[4755]: I1124 01:29:04.982190 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-765f9bdf9-sx8ch" event={"ID":"05c3cedf-f9a5-453f-a879-fea1939c9f87","Type":"ContainerStarted","Data":"de51261dd19fe42017115bf6f2e72378562522e994c2c46490d3de49f5021c22"} Nov 24 01:29:04 crc kubenswrapper[4755]: I1124 01:29:04.982233 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-765f9bdf9-sx8ch" event={"ID":"05c3cedf-f9a5-453f-a879-fea1939c9f87","Type":"ContainerStarted","Data":"2e9dd700d90b05eb57192f71f39dca3ac012dbb7010c613365f00125d921a39a"} Nov 24 01:29:04 crc kubenswrapper[4755]: I1124 01:29:04.982356 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-765f9bdf9-sx8ch" Nov 24 01:29:04 crc kubenswrapper[4755]: I1124 01:29:04.987462 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" 
event={"ID":"b1962128-02a0-46c3-82c2-5055c2aed0b9","Type":"ContainerStarted","Data":"77857823e666ac5615c021b67cb4fcc6f558c850cc69c4d388ccf77b95626fc7"} Nov 24 01:29:04 crc kubenswrapper[4755]: I1124 01:29:04.994691 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=21.994665346 podStartE2EDuration="21.994665346s" podCreationTimestamp="2025-11-24 01:28:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:29:04.984637353 +0000 UTC m=+969.670702854" watchObservedRunningTime="2025-11-24 01:29:04.994665346 +0000 UTC m=+969.680730857" Nov 24 01:29:05 crc kubenswrapper[4755]: I1124 01:29:05.078512 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-765f9bdf9-sx8ch" podStartSLOduration=4.078493004 podStartE2EDuration="4.078493004s" podCreationTimestamp="2025-11-24 01:29:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:29:05.077745373 +0000 UTC m=+969.763810874" watchObservedRunningTime="2025-11-24 01:29:05.078493004 +0000 UTC m=+969.764558505" Nov 24 01:29:05 crc kubenswrapper[4755]: I1124 01:29:05.120833 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-77585f5f8c-x9ksn" podUID="46a31b0c-c17e-469f-823e-f56504308b2c" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.125:5353: i/o timeout" Nov 24 01:29:06 crc kubenswrapper[4755]: I1124 01:29:06.002668 4755 generic.go:334] "Generic (PLEG): container finished" podID="ebd8073d-4cda-4515-8999-cfdb8a4fcc7f" containerID="eff9cc0342e0869618d644a3b0b2d906ff00fdc000c45039d9e76333aadbcc40" exitCode=0 Nov 24 01:29:06 crc kubenswrapper[4755]: I1124 01:29:06.015862 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-lb2nq" event={"ID":"ebd8073d-4cda-4515-8999-cfdb8a4fcc7f","Type":"ContainerDied","Data":"eff9cc0342e0869618d644a3b0b2d906ff00fdc000c45039d9e76333aadbcc40"} Nov 24 01:29:06 crc kubenswrapper[4755]: I1124 01:29:06.064468 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=10.062264057 podStartE2EDuration="10.062264057s" podCreationTimestamp="2025-11-24 01:28:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:29:06.049088326 +0000 UTC m=+970.735153857" watchObservedRunningTime="2025-11-24 01:29:06.062264057 +0000 UTC m=+970.748329558" Nov 24 01:29:06 crc kubenswrapper[4755]: I1124 01:29:06.985060 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Nov 24 01:29:06 crc kubenswrapper[4755]: I1124 01:29:06.985337 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Nov 24 01:29:07 crc kubenswrapper[4755]: I1124 01:29:07.013293 4755 generic.go:334] "Generic (PLEG): container finished" podID="2bed5952-1a88-4314-befd-bb76c5431cdd" containerID="08bbc10523221ffa949c537018dab1ee193ae53e6d68b8d22ed837852488c548" exitCode=0 Nov 24 01:29:07 crc kubenswrapper[4755]: I1124 01:29:07.013395 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-srjbc" 
event={"ID":"2bed5952-1a88-4314-befd-bb76c5431cdd","Type":"ContainerDied","Data":"08bbc10523221ffa949c537018dab1ee193ae53e6d68b8d22ed837852488c548"} Nov 24 01:29:07 crc kubenswrapper[4755]: I1124 01:29:07.031583 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Nov 24 01:29:07 crc kubenswrapper[4755]: I1124 01:29:07.041594 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Nov 24 01:29:07 crc kubenswrapper[4755]: I1124 01:29:07.866319 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-tgz4r" Nov 24 01:29:07 crc kubenswrapper[4755]: I1124 01:29:07.872901 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-lb2nq" Nov 24 01:29:07 crc kubenswrapper[4755]: I1124 01:29:07.998880 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ebd8073d-4cda-4515-8999-cfdb8a4fcc7f-credential-keys\") pod \"ebd8073d-4cda-4515-8999-cfdb8a4fcc7f\" (UID: \"ebd8073d-4cda-4515-8999-cfdb8a4fcc7f\") " Nov 24 01:29:07 crc kubenswrapper[4755]: I1124 01:29:07.998981 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ebd8073d-4cda-4515-8999-cfdb8a4fcc7f-scripts\") pod \"ebd8073d-4cda-4515-8999-cfdb8a4fcc7f\" (UID: \"ebd8073d-4cda-4515-8999-cfdb8a4fcc7f\") " Nov 24 01:29:07 crc kubenswrapper[4755]: I1124 01:29:07.999006 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gpxxc\" (UniqueName: \"kubernetes.io/projected/ec11ae96-46e1-47a2-ae19-61941253ce7c-kube-api-access-gpxxc\") pod \"ec11ae96-46e1-47a2-ae19-61941253ce7c\" (UID: \"ec11ae96-46e1-47a2-ae19-61941253ce7c\") " Nov 24 01:29:07 crc kubenswrapper[4755]: I1124 01:29:07.999069 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k8qkn\" (UniqueName: \"kubernetes.io/projected/ebd8073d-4cda-4515-8999-cfdb8a4fcc7f-kube-api-access-k8qkn\") pod \"ebd8073d-4cda-4515-8999-cfdb8a4fcc7f\" (UID: \"ebd8073d-4cda-4515-8999-cfdb8a4fcc7f\") " Nov 24 01:29:07 crc kubenswrapper[4755]: I1124 01:29:07.999099 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebd8073d-4cda-4515-8999-cfdb8a4fcc7f-combined-ca-bundle\") pod \"ebd8073d-4cda-4515-8999-cfdb8a4fcc7f\" (UID: \"ebd8073d-4cda-4515-8999-cfdb8a4fcc7f\") " Nov 24 01:29:07 crc kubenswrapper[4755]: I1124 01:29:07.999123 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ebd8073d-4cda-4515-8999-cfdb8a4fcc7f-config-data\") pod \"ebd8073d-4cda-4515-8999-cfdb8a4fcc7f\" (UID: \"ebd8073d-4cda-4515-8999-cfdb8a4fcc7f\") " Nov 24 01:29:07 crc kubenswrapper[4755]: I1124 01:29:07.999149 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec11ae96-46e1-47a2-ae19-61941253ce7c-combined-ca-bundle\") pod \"ec11ae96-46e1-47a2-ae19-61941253ce7c\" (UID: \"ec11ae96-46e1-47a2-ae19-61941253ce7c\") " Nov 24 01:29:07 crc kubenswrapper[4755]: I1124 01:29:07.999213 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: 
\"kubernetes.io/secret/ebd8073d-4cda-4515-8999-cfdb8a4fcc7f-fernet-keys\") pod \"ebd8073d-4cda-4515-8999-cfdb8a4fcc7f\" (UID: \"ebd8073d-4cda-4515-8999-cfdb8a4fcc7f\") " Nov 24 01:29:07 crc kubenswrapper[4755]: I1124 01:29:07.999238 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec11ae96-46e1-47a2-ae19-61941253ce7c-logs\") pod \"ec11ae96-46e1-47a2-ae19-61941253ce7c\" (UID: \"ec11ae96-46e1-47a2-ae19-61941253ce7c\") " Nov 24 01:29:07 crc kubenswrapper[4755]: I1124 01:29:07.999261 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec11ae96-46e1-47a2-ae19-61941253ce7c-scripts\") pod \"ec11ae96-46e1-47a2-ae19-61941253ce7c\" (UID: \"ec11ae96-46e1-47a2-ae19-61941253ce7c\") " Nov 24 01:29:07 crc kubenswrapper[4755]: I1124 01:29:07.999311 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec11ae96-46e1-47a2-ae19-61941253ce7c-config-data\") pod \"ec11ae96-46e1-47a2-ae19-61941253ce7c\" (UID: \"ec11ae96-46e1-47a2-ae19-61941253ce7c\") " Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.000899 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ec11ae96-46e1-47a2-ae19-61941253ce7c-logs" (OuterVolumeSpecName: "logs") pod "ec11ae96-46e1-47a2-ae19-61941253ce7c" (UID: "ec11ae96-46e1-47a2-ae19-61941253ce7c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.011908 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ebd8073d-4cda-4515-8999-cfdb8a4fcc7f-kube-api-access-k8qkn" (OuterVolumeSpecName: "kube-api-access-k8qkn") pod "ebd8073d-4cda-4515-8999-cfdb8a4fcc7f" (UID: "ebd8073d-4cda-4515-8999-cfdb8a4fcc7f"). InnerVolumeSpecName "kube-api-access-k8qkn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.016704 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ebd8073d-4cda-4515-8999-cfdb8a4fcc7f-scripts" (OuterVolumeSpecName: "scripts") pod "ebd8073d-4cda-4515-8999-cfdb8a4fcc7f" (UID: "ebd8073d-4cda-4515-8999-cfdb8a4fcc7f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.016751 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ebd8073d-4cda-4515-8999-cfdb8a4fcc7f-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "ebd8073d-4cda-4515-8999-cfdb8a4fcc7f" (UID: "ebd8073d-4cda-4515-8999-cfdb8a4fcc7f"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.017523 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec11ae96-46e1-47a2-ae19-61941253ce7c-kube-api-access-gpxxc" (OuterVolumeSpecName: "kube-api-access-gpxxc") pod "ec11ae96-46e1-47a2-ae19-61941253ce7c" (UID: "ec11ae96-46e1-47a2-ae19-61941253ce7c"). InnerVolumeSpecName "kube-api-access-gpxxc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.019882 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec11ae96-46e1-47a2-ae19-61941253ce7c-scripts" (OuterVolumeSpecName: "scripts") pod "ec11ae96-46e1-47a2-ae19-61941253ce7c" (UID: "ec11ae96-46e1-47a2-ae19-61941253ce7c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.023289 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ebd8073d-4cda-4515-8999-cfdb8a4fcc7f-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "ebd8073d-4cda-4515-8999-cfdb8a4fcc7f" (UID: "ebd8073d-4cda-4515-8999-cfdb8a4fcc7f"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.054504 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-lb2nq" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.056269 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-tgz4r" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.063750 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ebd8073d-4cda-4515-8999-cfdb8a4fcc7f-config-data" (OuterVolumeSpecName: "config-data") pod "ebd8073d-4cda-4515-8999-cfdb8a4fcc7f" (UID: "ebd8073d-4cda-4515-8999-cfdb8a4fcc7f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.067687 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec11ae96-46e1-47a2-ae19-61941253ce7c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ec11ae96-46e1-47a2-ae19-61941253ce7c" (UID: "ec11ae96-46e1-47a2-ae19-61941253ce7c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.081192 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec11ae96-46e1-47a2-ae19-61941253ce7c-config-data" (OuterVolumeSpecName: "config-data") pod "ec11ae96-46e1-47a2-ae19-61941253ce7c" (UID: "ec11ae96-46e1-47a2-ae19-61941253ce7c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.081828 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ebd8073d-4cda-4515-8999-cfdb8a4fcc7f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ebd8073d-4cda-4515-8999-cfdb8a4fcc7f" (UID: "ebd8073d-4cda-4515-8999-cfdb8a4fcc7f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.101994 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec11ae96-46e1-47a2-ae19-61941253ce7c-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.102043 4755 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ebd8073d-4cda-4515-8999-cfdb8a4fcc7f-credential-keys\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.102057 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ebd8073d-4cda-4515-8999-cfdb8a4fcc7f-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.102069 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gpxxc\" (UniqueName: \"kubernetes.io/projected/ec11ae96-46e1-47a2-ae19-61941253ce7c-kube-api-access-gpxxc\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.102082 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k8qkn\" (UniqueName: \"kubernetes.io/projected/ebd8073d-4cda-4515-8999-cfdb8a4fcc7f-kube-api-access-k8qkn\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.102093 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebd8073d-4cda-4515-8999-cfdb8a4fcc7f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.102104 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ebd8073d-4cda-4515-8999-cfdb8a4fcc7f-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.102118 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec11ae96-46e1-47a2-ae19-61941253ce7c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.102133 4755 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ebd8073d-4cda-4515-8999-cfdb8a4fcc7f-fernet-keys\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.102148 4755 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec11ae96-46e1-47a2-ae19-61941253ce7c-logs\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.102160 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec11ae96-46e1-47a2-ae19-61941253ce7c-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.244731 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.245038 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-lb2nq" event={"ID":"ebd8073d-4cda-4515-8999-cfdb8a4fcc7f","Type":"ContainerDied","Data":"b0287636524bb6c59e8698d1f413543a4eedd4cdfd2da599eff96b5e768304dc"} Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.245062 4755 pod_container_deletor.go:80] "Container not 
found in pod's containers" containerID="b0287636524bb6c59e8698d1f413543a4eedd4cdfd2da599eff96b5e768304dc" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.245072 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-tgz4r" event={"ID":"ec11ae96-46e1-47a2-ae19-61941253ce7c","Type":"ContainerDied","Data":"09154f26ddb8a0ea2894ce5fbc142ad4a70b2c5e0267872d5f6dc1a415590e8d"} Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.245082 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="09154f26ddb8a0ea2894ce5fbc142ad4a70b2c5e0267872d5f6dc1a415590e8d" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.245093 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-68b755649b-gdjxt"] Nov 24 01:29:08 crc kubenswrapper[4755]: E1124 01:29:08.245444 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebd8073d-4cda-4515-8999-cfdb8a4fcc7f" containerName="keystone-bootstrap" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.245465 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebd8073d-4cda-4515-8999-cfdb8a4fcc7f" containerName="keystone-bootstrap" Nov 24 01:29:08 crc kubenswrapper[4755]: E1124 01:29:08.245491 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec11ae96-46e1-47a2-ae19-61941253ce7c" containerName="placement-db-sync" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.245497 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec11ae96-46e1-47a2-ae19-61941253ce7c" containerName="placement-db-sync" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.245678 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="ebd8073d-4cda-4515-8999-cfdb8a4fcc7f" containerName="keystone-bootstrap" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.245688 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec11ae96-46e1-47a2-ae19-61941253ce7c" containerName="placement-db-sync" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.246159 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-68b755649b-gdjxt"] Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.246180 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.246261 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-68b755649b-gdjxt" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.252055 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.265836 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-55cf755d8-2cns2" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.266200 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.266369 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-55cf755d8-2cns2" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.357505 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-75d8fb7cd4-vbxkn" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.360545 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-75d8fb7cd4-vbxkn" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.411988 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f-internal-tls-certs\") pod \"keystone-68b755649b-gdjxt\" (UID: \"56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f\") " pod="openstack/keystone-68b755649b-gdjxt" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.412034 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f-credential-keys\") pod \"keystone-68b755649b-gdjxt\" (UID: \"56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f\") " pod="openstack/keystone-68b755649b-gdjxt" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.412078 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f-config-data\") pod \"keystone-68b755649b-gdjxt\" (UID: \"56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f\") " pod="openstack/keystone-68b755649b-gdjxt" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.412130 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f-scripts\") pod \"keystone-68b755649b-gdjxt\" (UID: \"56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f\") " pod="openstack/keystone-68b755649b-gdjxt" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.412145 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qghs8\" (UniqueName: \"kubernetes.io/projected/56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f-kube-api-access-qghs8\") pod \"keystone-68b755649b-gdjxt\" (UID: \"56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f\") " pod="openstack/keystone-68b755649b-gdjxt" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.412168 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f-fernet-keys\") pod \"keystone-68b755649b-gdjxt\" (UID: \"56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f\") " pod="openstack/keystone-68b755649b-gdjxt" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 
01:29:08.412240 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f-combined-ca-bundle\") pod \"keystone-68b755649b-gdjxt\" (UID: \"56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f\") " pod="openstack/keystone-68b755649b-gdjxt" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.412269 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f-public-tls-certs\") pod \"keystone-68b755649b-gdjxt\" (UID: \"56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f\") " pod="openstack/keystone-68b755649b-gdjxt" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.513689 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f-combined-ca-bundle\") pod \"keystone-68b755649b-gdjxt\" (UID: \"56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f\") " pod="openstack/keystone-68b755649b-gdjxt" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.514039 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f-public-tls-certs\") pod \"keystone-68b755649b-gdjxt\" (UID: \"56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f\") " pod="openstack/keystone-68b755649b-gdjxt" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.514139 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f-internal-tls-certs\") pod \"keystone-68b755649b-gdjxt\" (UID: \"56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f\") " pod="openstack/keystone-68b755649b-gdjxt" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.514220 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f-credential-keys\") pod \"keystone-68b755649b-gdjxt\" (UID: \"56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f\") " pod="openstack/keystone-68b755649b-gdjxt" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.514314 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f-config-data\") pod \"keystone-68b755649b-gdjxt\" (UID: \"56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f\") " pod="openstack/keystone-68b755649b-gdjxt" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.514398 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f-scripts\") pod \"keystone-68b755649b-gdjxt\" (UID: \"56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f\") " pod="openstack/keystone-68b755649b-gdjxt" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.514467 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qghs8\" (UniqueName: \"kubernetes.io/projected/56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f-kube-api-access-qghs8\") pod \"keystone-68b755649b-gdjxt\" (UID: \"56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f\") " pod="openstack/keystone-68b755649b-gdjxt" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.514550 4755 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f-fernet-keys\") pod \"keystone-68b755649b-gdjxt\" (UID: \"56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f\") " pod="openstack/keystone-68b755649b-gdjxt" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.520146 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f-combined-ca-bundle\") pod \"keystone-68b755649b-gdjxt\" (UID: \"56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f\") " pod="openstack/keystone-68b755649b-gdjxt" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.520914 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f-public-tls-certs\") pod \"keystone-68b755649b-gdjxt\" (UID: \"56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f\") " pod="openstack/keystone-68b755649b-gdjxt" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.522784 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f-scripts\") pod \"keystone-68b755649b-gdjxt\" (UID: \"56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f\") " pod="openstack/keystone-68b755649b-gdjxt" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.522980 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f-fernet-keys\") pod \"keystone-68b755649b-gdjxt\" (UID: \"56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f\") " pod="openstack/keystone-68b755649b-gdjxt" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.523885 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f-internal-tls-certs\") pod \"keystone-68b755649b-gdjxt\" (UID: \"56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f\") " pod="openstack/keystone-68b755649b-gdjxt" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.525476 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f-config-data\") pod \"keystone-68b755649b-gdjxt\" (UID: \"56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f\") " pod="openstack/keystone-68b755649b-gdjxt" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.526031 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f-credential-keys\") pod \"keystone-68b755649b-gdjxt\" (UID: \"56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f\") " pod="openstack/keystone-68b755649b-gdjxt" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.535845 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qghs8\" (UniqueName: \"kubernetes.io/projected/56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f-kube-api-access-qghs8\") pod \"keystone-68b755649b-gdjxt\" (UID: \"56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f\") " pod="openstack/keystone-68b755649b-gdjxt" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.608927 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-68b755649b-gdjxt" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.980787 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-8674657456-64797"] Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.982207 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-8674657456-64797" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.984908 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.984992 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.985290 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.985482 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-j78z7" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.985612 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Nov 24 01:29:08 crc kubenswrapper[4755]: I1124 01:29:08.995094 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-8674657456-64797"] Nov 24 01:29:09 crc kubenswrapper[4755]: I1124 01:29:09.124363 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/521180f5-3721-4b4d-8359-e7b69268a36a-public-tls-certs\") pod \"placement-8674657456-64797\" (UID: \"521180f5-3721-4b4d-8359-e7b69268a36a\") " pod="openstack/placement-8674657456-64797" Nov 24 01:29:09 crc kubenswrapper[4755]: I1124 01:29:09.124434 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hl4zx\" (UniqueName: \"kubernetes.io/projected/521180f5-3721-4b4d-8359-e7b69268a36a-kube-api-access-hl4zx\") pod \"placement-8674657456-64797\" (UID: \"521180f5-3721-4b4d-8359-e7b69268a36a\") " pod="openstack/placement-8674657456-64797" Nov 24 01:29:09 crc kubenswrapper[4755]: I1124 01:29:09.124493 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/521180f5-3721-4b4d-8359-e7b69268a36a-logs\") pod \"placement-8674657456-64797\" (UID: \"521180f5-3721-4b4d-8359-e7b69268a36a\") " pod="openstack/placement-8674657456-64797" Nov 24 01:29:09 crc kubenswrapper[4755]: I1124 01:29:09.124518 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/521180f5-3721-4b4d-8359-e7b69268a36a-internal-tls-certs\") pod \"placement-8674657456-64797\" (UID: \"521180f5-3721-4b4d-8359-e7b69268a36a\") " pod="openstack/placement-8674657456-64797" Nov 24 01:29:09 crc kubenswrapper[4755]: I1124 01:29:09.124693 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/521180f5-3721-4b4d-8359-e7b69268a36a-combined-ca-bundle\") pod \"placement-8674657456-64797\" (UID: \"521180f5-3721-4b4d-8359-e7b69268a36a\") " pod="openstack/placement-8674657456-64797" Nov 24 01:29:09 crc kubenswrapper[4755]: I1124 01:29:09.124792 4755 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/521180f5-3721-4b4d-8359-e7b69268a36a-config-data\") pod \"placement-8674657456-64797\" (UID: \"521180f5-3721-4b4d-8359-e7b69268a36a\") " pod="openstack/placement-8674657456-64797" Nov 24 01:29:09 crc kubenswrapper[4755]: I1124 01:29:09.124815 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/521180f5-3721-4b4d-8359-e7b69268a36a-scripts\") pod \"placement-8674657456-64797\" (UID: \"521180f5-3721-4b4d-8359-e7b69268a36a\") " pod="openstack/placement-8674657456-64797" Nov 24 01:29:09 crc kubenswrapper[4755]: I1124 01:29:09.226946 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/521180f5-3721-4b4d-8359-e7b69268a36a-combined-ca-bundle\") pod \"placement-8674657456-64797\" (UID: \"521180f5-3721-4b4d-8359-e7b69268a36a\") " pod="openstack/placement-8674657456-64797" Nov 24 01:29:09 crc kubenswrapper[4755]: I1124 01:29:09.227048 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/521180f5-3721-4b4d-8359-e7b69268a36a-config-data\") pod \"placement-8674657456-64797\" (UID: \"521180f5-3721-4b4d-8359-e7b69268a36a\") " pod="openstack/placement-8674657456-64797" Nov 24 01:29:09 crc kubenswrapper[4755]: I1124 01:29:09.227078 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/521180f5-3721-4b4d-8359-e7b69268a36a-scripts\") pod \"placement-8674657456-64797\" (UID: \"521180f5-3721-4b4d-8359-e7b69268a36a\") " pod="openstack/placement-8674657456-64797" Nov 24 01:29:09 crc kubenswrapper[4755]: I1124 01:29:09.227222 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/521180f5-3721-4b4d-8359-e7b69268a36a-public-tls-certs\") pod \"placement-8674657456-64797\" (UID: \"521180f5-3721-4b4d-8359-e7b69268a36a\") " pod="openstack/placement-8674657456-64797" Nov 24 01:29:09 crc kubenswrapper[4755]: I1124 01:29:09.227289 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hl4zx\" (UniqueName: \"kubernetes.io/projected/521180f5-3721-4b4d-8359-e7b69268a36a-kube-api-access-hl4zx\") pod \"placement-8674657456-64797\" (UID: \"521180f5-3721-4b4d-8359-e7b69268a36a\") " pod="openstack/placement-8674657456-64797" Nov 24 01:29:09 crc kubenswrapper[4755]: I1124 01:29:09.227400 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/521180f5-3721-4b4d-8359-e7b69268a36a-logs\") pod \"placement-8674657456-64797\" (UID: \"521180f5-3721-4b4d-8359-e7b69268a36a\") " pod="openstack/placement-8674657456-64797" Nov 24 01:29:09 crc kubenswrapper[4755]: I1124 01:29:09.227440 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/521180f5-3721-4b4d-8359-e7b69268a36a-internal-tls-certs\") pod \"placement-8674657456-64797\" (UID: \"521180f5-3721-4b4d-8359-e7b69268a36a\") " pod="openstack/placement-8674657456-64797" Nov 24 01:29:09 crc kubenswrapper[4755]: I1124 01:29:09.228248 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/521180f5-3721-4b4d-8359-e7b69268a36a-logs\") pod \"placement-8674657456-64797\" (UID: \"521180f5-3721-4b4d-8359-e7b69268a36a\") " pod="openstack/placement-8674657456-64797" Nov 24 01:29:09 crc kubenswrapper[4755]: I1124 01:29:09.233018 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/521180f5-3721-4b4d-8359-e7b69268a36a-internal-tls-certs\") pod \"placement-8674657456-64797\" (UID: \"521180f5-3721-4b4d-8359-e7b69268a36a\") " pod="openstack/placement-8674657456-64797" Nov 24 01:29:09 crc kubenswrapper[4755]: I1124 01:29:09.233053 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/521180f5-3721-4b4d-8359-e7b69268a36a-public-tls-certs\") pod \"placement-8674657456-64797\" (UID: \"521180f5-3721-4b4d-8359-e7b69268a36a\") " pod="openstack/placement-8674657456-64797" Nov 24 01:29:09 crc kubenswrapper[4755]: I1124 01:29:09.246863 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/521180f5-3721-4b4d-8359-e7b69268a36a-combined-ca-bundle\") pod \"placement-8674657456-64797\" (UID: \"521180f5-3721-4b4d-8359-e7b69268a36a\") " pod="openstack/placement-8674657456-64797" Nov 24 01:29:09 crc kubenswrapper[4755]: I1124 01:29:09.247263 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hl4zx\" (UniqueName: \"kubernetes.io/projected/521180f5-3721-4b4d-8359-e7b69268a36a-kube-api-access-hl4zx\") pod \"placement-8674657456-64797\" (UID: \"521180f5-3721-4b4d-8359-e7b69268a36a\") " pod="openstack/placement-8674657456-64797" Nov 24 01:29:09 crc kubenswrapper[4755]: I1124 01:29:09.248001 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/521180f5-3721-4b4d-8359-e7b69268a36a-scripts\") pod \"placement-8674657456-64797\" (UID: \"521180f5-3721-4b4d-8359-e7b69268a36a\") " pod="openstack/placement-8674657456-64797" Nov 24 01:29:09 crc kubenswrapper[4755]: I1124 01:29:09.249421 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/521180f5-3721-4b4d-8359-e7b69268a36a-config-data\") pod \"placement-8674657456-64797\" (UID: \"521180f5-3721-4b4d-8359-e7b69268a36a\") " pod="openstack/placement-8674657456-64797" Nov 24 01:29:09 crc kubenswrapper[4755]: I1124 01:29:09.299511 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-8674657456-64797" Nov 24 01:29:09 crc kubenswrapper[4755]: I1124 01:29:09.604823 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-84b966f6c9-22wt6" Nov 24 01:29:09 crc kubenswrapper[4755]: I1124 01:29:09.658312 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-27drm"] Nov 24 01:29:09 crc kubenswrapper[4755]: I1124 01:29:09.658540 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8b5c85b87-27drm" podUID="19be7f69-ce3a-4c28-8934-885d997016ff" containerName="dnsmasq-dns" containerID="cri-o://1721d87ddbf2367aba3c08cef98ee96b7c9b53345ff588cf3321efb323d739b6" gracePeriod=10 Nov 24 01:29:10 crc kubenswrapper[4755]: I1124 01:29:10.084796 4755 generic.go:334] "Generic (PLEG): container finished" podID="19be7f69-ce3a-4c28-8934-885d997016ff" containerID="1721d87ddbf2367aba3c08cef98ee96b7c9b53345ff588cf3321efb323d739b6" exitCode=0 Nov 24 01:29:10 crc kubenswrapper[4755]: I1124 01:29:10.085074 4755 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 24 01:29:10 crc kubenswrapper[4755]: I1124 01:29:10.084904 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-27drm" event={"ID":"19be7f69-ce3a-4c28-8934-885d997016ff","Type":"ContainerDied","Data":"1721d87ddbf2367aba3c08cef98ee96b7c9b53345ff588cf3321efb323d739b6"} Nov 24 01:29:10 crc kubenswrapper[4755]: I1124 01:29:10.181225 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Nov 24 01:29:10 crc kubenswrapper[4755]: I1124 01:29:10.185654 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Nov 24 01:29:10 crc kubenswrapper[4755]: I1124 01:29:10.867941 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-srjbc" Nov 24 01:29:10 crc kubenswrapper[4755]: I1124 01:29:10.970335 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2bed5952-1a88-4314-befd-bb76c5431cdd-combined-ca-bundle\") pod \"2bed5952-1a88-4314-befd-bb76c5431cdd\" (UID: \"2bed5952-1a88-4314-befd-bb76c5431cdd\") " Nov 24 01:29:10 crc kubenswrapper[4755]: I1124 01:29:10.970403 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-485q7\" (UniqueName: \"kubernetes.io/projected/2bed5952-1a88-4314-befd-bb76c5431cdd-kube-api-access-485q7\") pod \"2bed5952-1a88-4314-befd-bb76c5431cdd\" (UID: \"2bed5952-1a88-4314-befd-bb76c5431cdd\") " Nov 24 01:29:10 crc kubenswrapper[4755]: I1124 01:29:10.970482 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2bed5952-1a88-4314-befd-bb76c5431cdd-db-sync-config-data\") pod \"2bed5952-1a88-4314-befd-bb76c5431cdd\" (UID: \"2bed5952-1a88-4314-befd-bb76c5431cdd\") " Nov 24 01:29:10 crc kubenswrapper[4755]: I1124 01:29:10.983991 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2bed5952-1a88-4314-befd-bb76c5431cdd-kube-api-access-485q7" (OuterVolumeSpecName: "kube-api-access-485q7") pod "2bed5952-1a88-4314-befd-bb76c5431cdd" (UID: "2bed5952-1a88-4314-befd-bb76c5431cdd"). InnerVolumeSpecName "kube-api-access-485q7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:29:10 crc kubenswrapper[4755]: I1124 01:29:10.989612 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2bed5952-1a88-4314-befd-bb76c5431cdd-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "2bed5952-1a88-4314-befd-bb76c5431cdd" (UID: "2bed5952-1a88-4314-befd-bb76c5431cdd"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:29:11 crc kubenswrapper[4755]: I1124 01:29:11.002811 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2bed5952-1a88-4314-befd-bb76c5431cdd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2bed5952-1a88-4314-befd-bb76c5431cdd" (UID: "2bed5952-1a88-4314-befd-bb76c5431cdd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:29:11 crc kubenswrapper[4755]: I1124 01:29:11.075201 4755 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2bed5952-1a88-4314-befd-bb76c5431cdd-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:11 crc kubenswrapper[4755]: I1124 01:29:11.075231 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2bed5952-1a88-4314-befd-bb76c5431cdd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:11 crc kubenswrapper[4755]: I1124 01:29:11.075256 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-485q7\" (UniqueName: \"kubernetes.io/projected/2bed5952-1a88-4314-befd-bb76c5431cdd-kube-api-access-485q7\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:11 crc kubenswrapper[4755]: I1124 01:29:11.080035 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-27drm" Nov 24 01:29:11 crc kubenswrapper[4755]: I1124 01:29:11.094140 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-27drm" event={"ID":"19be7f69-ce3a-4c28-8934-885d997016ff","Type":"ContainerDied","Data":"7815090864a33006b8118ff978ebf97aa09f038dcfdff5d89e55692fc01bc6e4"} Nov 24 01:29:11 crc kubenswrapper[4755]: I1124 01:29:11.094193 4755 scope.go:117] "RemoveContainer" containerID="1721d87ddbf2367aba3c08cef98ee96b7c9b53345ff588cf3321efb323d739b6" Nov 24 01:29:11 crc kubenswrapper[4755]: I1124 01:29:11.094350 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-27drm" Nov 24 01:29:11 crc kubenswrapper[4755]: I1124 01:29:11.107721 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-srjbc" event={"ID":"2bed5952-1a88-4314-befd-bb76c5431cdd","Type":"ContainerDied","Data":"5b9b1ec96f8af8453f710d26619dac4942703437586b131f3dbda8a03666e5ac"} Nov 24 01:29:11 crc kubenswrapper[4755]: I1124 01:29:11.107891 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5b9b1ec96f8af8453f710d26619dac4942703437586b131f3dbda8a03666e5ac" Nov 24 01:29:11 crc kubenswrapper[4755]: I1124 01:29:11.107761 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-srjbc" Nov 24 01:29:11 crc kubenswrapper[4755]: I1124 01:29:11.132112 4755 scope.go:117] "RemoveContainer" containerID="36a2644c35e228b5d798e51b6eba96035be4caddb427027902c5a10828b5e7fd" Nov 24 01:29:11 crc kubenswrapper[4755]: I1124 01:29:11.176615 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xsxjk\" (UniqueName: \"kubernetes.io/projected/19be7f69-ce3a-4c28-8934-885d997016ff-kube-api-access-xsxjk\") pod \"19be7f69-ce3a-4c28-8934-885d997016ff\" (UID: \"19be7f69-ce3a-4c28-8934-885d997016ff\") " Nov 24 01:29:11 crc kubenswrapper[4755]: I1124 01:29:11.177065 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/19be7f69-ce3a-4c28-8934-885d997016ff-ovsdbserver-sb\") pod \"19be7f69-ce3a-4c28-8934-885d997016ff\" (UID: \"19be7f69-ce3a-4c28-8934-885d997016ff\") " Nov 24 01:29:11 crc kubenswrapper[4755]: I1124 01:29:11.177157 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/19be7f69-ce3a-4c28-8934-885d997016ff-dns-swift-storage-0\") pod \"19be7f69-ce3a-4c28-8934-885d997016ff\" (UID: \"19be7f69-ce3a-4c28-8934-885d997016ff\") " Nov 24 01:29:11 crc kubenswrapper[4755]: I1124 01:29:11.177233 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/19be7f69-ce3a-4c28-8934-885d997016ff-ovsdbserver-nb\") pod \"19be7f69-ce3a-4c28-8934-885d997016ff\" (UID: \"19be7f69-ce3a-4c28-8934-885d997016ff\") " Nov 24 01:29:11 crc kubenswrapper[4755]: I1124 01:29:11.177392 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19be7f69-ce3a-4c28-8934-885d997016ff-config\") pod \"19be7f69-ce3a-4c28-8934-885d997016ff\" (UID: \"19be7f69-ce3a-4c28-8934-885d997016ff\") " Nov 24 01:29:11 crc kubenswrapper[4755]: I1124 01:29:11.177447 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/19be7f69-ce3a-4c28-8934-885d997016ff-dns-svc\") pod \"19be7f69-ce3a-4c28-8934-885d997016ff\" (UID: \"19be7f69-ce3a-4c28-8934-885d997016ff\") " Nov 24 01:29:11 crc kubenswrapper[4755]: I1124 01:29:11.207623 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19be7f69-ce3a-4c28-8934-885d997016ff-kube-api-access-xsxjk" (OuterVolumeSpecName: "kube-api-access-xsxjk") pod "19be7f69-ce3a-4c28-8934-885d997016ff" (UID: "19be7f69-ce3a-4c28-8934-885d997016ff"). InnerVolumeSpecName "kube-api-access-xsxjk". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:29:11 crc kubenswrapper[4755]: I1124 01:29:11.238184 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/19be7f69-ce3a-4c28-8934-885d997016ff-config" (OuterVolumeSpecName: "config") pod "19be7f69-ce3a-4c28-8934-885d997016ff" (UID: "19be7f69-ce3a-4c28-8934-885d997016ff"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:29:11 crc kubenswrapper[4755]: I1124 01:29:11.265774 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/19be7f69-ce3a-4c28-8934-885d997016ff-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "19be7f69-ce3a-4c28-8934-885d997016ff" (UID: "19be7f69-ce3a-4c28-8934-885d997016ff"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:29:11 crc kubenswrapper[4755]: I1124 01:29:11.280077 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19be7f69-ce3a-4c28-8934-885d997016ff-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:11 crc kubenswrapper[4755]: I1124 01:29:11.280112 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xsxjk\" (UniqueName: \"kubernetes.io/projected/19be7f69-ce3a-4c28-8934-885d997016ff-kube-api-access-xsxjk\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:11 crc kubenswrapper[4755]: I1124 01:29:11.280124 4755 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/19be7f69-ce3a-4c28-8934-885d997016ff-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:11 crc kubenswrapper[4755]: I1124 01:29:11.285712 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/19be7f69-ce3a-4c28-8934-885d997016ff-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "19be7f69-ce3a-4c28-8934-885d997016ff" (UID: "19be7f69-ce3a-4c28-8934-885d997016ff"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:29:11 crc kubenswrapper[4755]: I1124 01:29:11.288049 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/19be7f69-ce3a-4c28-8934-885d997016ff-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "19be7f69-ce3a-4c28-8934-885d997016ff" (UID: "19be7f69-ce3a-4c28-8934-885d997016ff"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:29:11 crc kubenswrapper[4755]: I1124 01:29:11.290745 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/19be7f69-ce3a-4c28-8934-885d997016ff-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "19be7f69-ce3a-4c28-8934-885d997016ff" (UID: "19be7f69-ce3a-4c28-8934-885d997016ff"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:29:11 crc kubenswrapper[4755]: I1124 01:29:11.321976 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-68b755649b-gdjxt"] Nov 24 01:29:11 crc kubenswrapper[4755]: W1124 01:29:11.324013 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod56aa6d38_fae0_456e_8f3e_1dfc9e21aa0f.slice/crio-a9514805f4573b730a24489259f73b9549abb4766aab7fdbf83aa28e86e1298d WatchSource:0}: Error finding container a9514805f4573b730a24489259f73b9549abb4766aab7fdbf83aa28e86e1298d: Status 404 returned error can't find the container with id a9514805f4573b730a24489259f73b9549abb4766aab7fdbf83aa28e86e1298d Nov 24 01:29:11 crc kubenswrapper[4755]: I1124 01:29:11.381574 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/19be7f69-ce3a-4c28-8934-885d997016ff-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:11 crc kubenswrapper[4755]: I1124 01:29:11.381620 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/19be7f69-ce3a-4c28-8934-885d997016ff-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:11 crc kubenswrapper[4755]: I1124 01:29:11.381630 4755 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/19be7f69-ce3a-4c28-8934-885d997016ff-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:11 crc kubenswrapper[4755]: I1124 01:29:11.440009 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-27drm"] Nov 24 01:29:11 crc kubenswrapper[4755]: I1124 01:29:11.450504 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-27drm"] Nov 24 01:29:11 crc kubenswrapper[4755]: I1124 01:29:11.486723 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-8674657456-64797"] Nov 24 01:29:11 crc kubenswrapper[4755]: W1124 01:29:11.490643 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod521180f5_3721_4b4d_8359_e7b69268a36a.slice/crio-226c90e3a7fc56417ddd20a6d313cde1bc599f9b02737d3e1f00e8818f58f831 WatchSource:0}: Error finding container 226c90e3a7fc56417ddd20a6d313cde1bc599f9b02737d3e1f00e8818f58f831: Status 404 returned error can't find the container with id 226c90e3a7fc56417ddd20a6d313cde1bc599f9b02737d3e1f00e8818f58f831 Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.007572 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="19be7f69-ce3a-4c28-8934-885d997016ff" path="/var/lib/kubelet/pods/19be7f69-ce3a-4c28-8934-885d997016ff/volumes" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.114669 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-86fc6cb5d-5rbfc"] Nov 24 01:29:12 crc kubenswrapper[4755]: E1124 01:29:12.115439 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bed5952-1a88-4314-befd-bb76c5431cdd" containerName="barbican-db-sync" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.115463 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bed5952-1a88-4314-befd-bb76c5431cdd" containerName="barbican-db-sync" Nov 24 01:29:12 crc kubenswrapper[4755]: E1124 01:29:12.115488 4755 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="19be7f69-ce3a-4c28-8934-885d997016ff" containerName="dnsmasq-dns" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.115498 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="19be7f69-ce3a-4c28-8934-885d997016ff" containerName="dnsmasq-dns" Nov 24 01:29:12 crc kubenswrapper[4755]: E1124 01:29:12.115514 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19be7f69-ce3a-4c28-8934-885d997016ff" containerName="init" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.115521 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="19be7f69-ce3a-4c28-8934-885d997016ff" containerName="init" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.115784 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="19be7f69-ce3a-4c28-8934-885d997016ff" containerName="dnsmasq-dns" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.115809 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="2bed5952-1a88-4314-befd-bb76c5431cdd" containerName="barbican-db-sync" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.116983 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-545bd7b455-5w47r"] Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.118453 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-545bd7b455-5w47r" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.119043 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-86fc6cb5d-5rbfc" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.125172 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.125372 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.125428 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.142406 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-9kkj6" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.151192 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-545bd7b455-5w47r"] Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.173879 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-86fc6cb5d-5rbfc"] Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.173910 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-68b755649b-gdjxt" event={"ID":"56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f","Type":"ContainerStarted","Data":"39bfbf0cb6f09d7e12c5374f311202007ae4fc9f7fcf1de4294e93322cc53546"} Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.173926 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-68b755649b-gdjxt" event={"ID":"56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f","Type":"ContainerStarted","Data":"a9514805f4573b730a24489259f73b9549abb4766aab7fdbf83aa28e86e1298d"} Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.173940 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-68b755649b-gdjxt" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.193877 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/placement-8674657456-64797" event={"ID":"521180f5-3721-4b4d-8359-e7b69268a36a","Type":"ContainerStarted","Data":"5a8cf63f8afcb1091450ea71ccdeffdca5eff88c7cbaaaf972d1fd94bfe0d880"} Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.193922 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-8674657456-64797" event={"ID":"521180f5-3721-4b4d-8359-e7b69268a36a","Type":"ContainerStarted","Data":"bc3399ef892f73dd0a60c7637abac5c3fdc49db4c11ad88c9541edae0c8c2cc1"} Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.193934 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-8674657456-64797" event={"ID":"521180f5-3721-4b4d-8359-e7b69268a36a","Type":"ContainerStarted","Data":"226c90e3a7fc56417ddd20a6d313cde1bc599f9b02737d3e1f00e8818f58f831"} Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.194718 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-8674657456-64797" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.194763 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-8674657456-64797" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.198594 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7e96f37-574f-4900-88f9-33dc41179807","Type":"ContainerStarted","Data":"32ef1e5e5d2a7788aad23e607b6e88ccd77e35194ff72e7c65a23a13a8460145"} Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.201374 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b447c315-6a28-4a18-af48-fbcf84cd0c00-combined-ca-bundle\") pod \"barbican-keystone-listener-86fc6cb5d-5rbfc\" (UID: \"b447c315-6a28-4a18-af48-fbcf84cd0c00\") " pod="openstack/barbican-keystone-listener-86fc6cb5d-5rbfc" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.201444 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4100cddd-df77-4b8a-af0c-746bbd98c80f-logs\") pod \"barbican-worker-545bd7b455-5w47r\" (UID: \"4100cddd-df77-4b8a-af0c-746bbd98c80f\") " pod="openstack/barbican-worker-545bd7b455-5w47r" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.201465 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b447c315-6a28-4a18-af48-fbcf84cd0c00-config-data\") pod \"barbican-keystone-listener-86fc6cb5d-5rbfc\" (UID: \"b447c315-6a28-4a18-af48-fbcf84cd0c00\") " pod="openstack/barbican-keystone-listener-86fc6cb5d-5rbfc" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.201505 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-44hvs\" (UniqueName: \"kubernetes.io/projected/4100cddd-df77-4b8a-af0c-746bbd98c80f-kube-api-access-44hvs\") pod \"barbican-worker-545bd7b455-5w47r\" (UID: \"4100cddd-df77-4b8a-af0c-746bbd98c80f\") " pod="openstack/barbican-worker-545bd7b455-5w47r" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.201548 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b447c315-6a28-4a18-af48-fbcf84cd0c00-config-data-custom\") pod \"barbican-keystone-listener-86fc6cb5d-5rbfc\" (UID: \"b447c315-6a28-4a18-af48-fbcf84cd0c00\") " 
pod="openstack/barbican-keystone-listener-86fc6cb5d-5rbfc" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.201593 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5kp6h\" (UniqueName: \"kubernetes.io/projected/b447c315-6a28-4a18-af48-fbcf84cd0c00-kube-api-access-5kp6h\") pod \"barbican-keystone-listener-86fc6cb5d-5rbfc\" (UID: \"b447c315-6a28-4a18-af48-fbcf84cd0c00\") " pod="openstack/barbican-keystone-listener-86fc6cb5d-5rbfc" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.201653 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4100cddd-df77-4b8a-af0c-746bbd98c80f-config-data\") pod \"barbican-worker-545bd7b455-5w47r\" (UID: \"4100cddd-df77-4b8a-af0c-746bbd98c80f\") " pod="openstack/barbican-worker-545bd7b455-5w47r" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.201722 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b447c315-6a28-4a18-af48-fbcf84cd0c00-logs\") pod \"barbican-keystone-listener-86fc6cb5d-5rbfc\" (UID: \"b447c315-6a28-4a18-af48-fbcf84cd0c00\") " pod="openstack/barbican-keystone-listener-86fc6cb5d-5rbfc" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.201795 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4100cddd-df77-4b8a-af0c-746bbd98c80f-combined-ca-bundle\") pod \"barbican-worker-545bd7b455-5w47r\" (UID: \"4100cddd-df77-4b8a-af0c-746bbd98c80f\") " pod="openstack/barbican-worker-545bd7b455-5w47r" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.201866 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4100cddd-df77-4b8a-af0c-746bbd98c80f-config-data-custom\") pod \"barbican-worker-545bd7b455-5w47r\" (UID: \"4100cddd-df77-4b8a-af0c-746bbd98c80f\") " pod="openstack/barbican-worker-545bd7b455-5w47r" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.267700 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-5xwvl"] Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.269170 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-5xwvl" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.290220 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-5xwvl"] Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.301860 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-68b755649b-gdjxt" podStartSLOduration=4.301839378 podStartE2EDuration="4.301839378s" podCreationTimestamp="2025-11-24 01:29:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:29:12.219530997 +0000 UTC m=+976.905596508" watchObservedRunningTime="2025-11-24 01:29:12.301839378 +0000 UTC m=+976.987904869" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.303367 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b447c315-6a28-4a18-af48-fbcf84cd0c00-config-data-custom\") pod \"barbican-keystone-listener-86fc6cb5d-5rbfc\" (UID: \"b447c315-6a28-4a18-af48-fbcf84cd0c00\") " pod="openstack/barbican-keystone-listener-86fc6cb5d-5rbfc" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.303429 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5kp6h\" (UniqueName: \"kubernetes.io/projected/b447c315-6a28-4a18-af48-fbcf84cd0c00-kube-api-access-5kp6h\") pod \"barbican-keystone-listener-86fc6cb5d-5rbfc\" (UID: \"b447c315-6a28-4a18-af48-fbcf84cd0c00\") " pod="openstack/barbican-keystone-listener-86fc6cb5d-5rbfc" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.303493 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4100cddd-df77-4b8a-af0c-746bbd98c80f-config-data\") pod \"barbican-worker-545bd7b455-5w47r\" (UID: \"4100cddd-df77-4b8a-af0c-746bbd98c80f\") " pod="openstack/barbican-worker-545bd7b455-5w47r" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.303599 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b447c315-6a28-4a18-af48-fbcf84cd0c00-logs\") pod \"barbican-keystone-listener-86fc6cb5d-5rbfc\" (UID: \"b447c315-6a28-4a18-af48-fbcf84cd0c00\") " pod="openstack/barbican-keystone-listener-86fc6cb5d-5rbfc" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.303713 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4100cddd-df77-4b8a-af0c-746bbd98c80f-combined-ca-bundle\") pod \"barbican-worker-545bd7b455-5w47r\" (UID: \"4100cddd-df77-4b8a-af0c-746bbd98c80f\") " pod="openstack/barbican-worker-545bd7b455-5w47r" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.303850 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4100cddd-df77-4b8a-af0c-746bbd98c80f-config-data-custom\") pod \"barbican-worker-545bd7b455-5w47r\" (UID: \"4100cddd-df77-4b8a-af0c-746bbd98c80f\") " pod="openstack/barbican-worker-545bd7b455-5w47r" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.303910 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b447c315-6a28-4a18-af48-fbcf84cd0c00-combined-ca-bundle\") pod \"barbican-keystone-listener-86fc6cb5d-5rbfc\" 
(UID: \"b447c315-6a28-4a18-af48-fbcf84cd0c00\") " pod="openstack/barbican-keystone-listener-86fc6cb5d-5rbfc" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.303948 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4100cddd-df77-4b8a-af0c-746bbd98c80f-logs\") pod \"barbican-worker-545bd7b455-5w47r\" (UID: \"4100cddd-df77-4b8a-af0c-746bbd98c80f\") " pod="openstack/barbican-worker-545bd7b455-5w47r" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.303972 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b447c315-6a28-4a18-af48-fbcf84cd0c00-config-data\") pod \"barbican-keystone-listener-86fc6cb5d-5rbfc\" (UID: \"b447c315-6a28-4a18-af48-fbcf84cd0c00\") " pod="openstack/barbican-keystone-listener-86fc6cb5d-5rbfc" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.304002 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-44hvs\" (UniqueName: \"kubernetes.io/projected/4100cddd-df77-4b8a-af0c-746bbd98c80f-kube-api-access-44hvs\") pod \"barbican-worker-545bd7b455-5w47r\" (UID: \"4100cddd-df77-4b8a-af0c-746bbd98c80f\") " pod="openstack/barbican-worker-545bd7b455-5w47r" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.306304 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-8674657456-64797" podStartSLOduration=4.306290611 podStartE2EDuration="4.306290611s" podCreationTimestamp="2025-11-24 01:29:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:29:12.285770175 +0000 UTC m=+976.971835676" watchObservedRunningTime="2025-11-24 01:29:12.306290611 +0000 UTC m=+976.992356112" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.307778 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b447c315-6a28-4a18-af48-fbcf84cd0c00-logs\") pod \"barbican-keystone-listener-86fc6cb5d-5rbfc\" (UID: \"b447c315-6a28-4a18-af48-fbcf84cd0c00\") " pod="openstack/barbican-keystone-listener-86fc6cb5d-5rbfc" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.318186 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4100cddd-df77-4b8a-af0c-746bbd98c80f-logs\") pod \"barbican-worker-545bd7b455-5w47r\" (UID: \"4100cddd-df77-4b8a-af0c-746bbd98c80f\") " pod="openstack/barbican-worker-545bd7b455-5w47r" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.327032 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4100cddd-df77-4b8a-af0c-746bbd98c80f-combined-ca-bundle\") pod \"barbican-worker-545bd7b455-5w47r\" (UID: \"4100cddd-df77-4b8a-af0c-746bbd98c80f\") " pod="openstack/barbican-worker-545bd7b455-5w47r" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.327974 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b447c315-6a28-4a18-af48-fbcf84cd0c00-config-data\") pod \"barbican-keystone-listener-86fc6cb5d-5rbfc\" (UID: \"b447c315-6a28-4a18-af48-fbcf84cd0c00\") " pod="openstack/barbican-keystone-listener-86fc6cb5d-5rbfc" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.329406 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b447c315-6a28-4a18-af48-fbcf84cd0c00-config-data-custom\") pod \"barbican-keystone-listener-86fc6cb5d-5rbfc\" (UID: \"b447c315-6a28-4a18-af48-fbcf84cd0c00\") " pod="openstack/barbican-keystone-listener-86fc6cb5d-5rbfc" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.330447 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4100cddd-df77-4b8a-af0c-746bbd98c80f-config-data\") pod \"barbican-worker-545bd7b455-5w47r\" (UID: \"4100cddd-df77-4b8a-af0c-746bbd98c80f\") " pod="openstack/barbican-worker-545bd7b455-5w47r" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.332875 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5kp6h\" (UniqueName: \"kubernetes.io/projected/b447c315-6a28-4a18-af48-fbcf84cd0c00-kube-api-access-5kp6h\") pod \"barbican-keystone-listener-86fc6cb5d-5rbfc\" (UID: \"b447c315-6a28-4a18-af48-fbcf84cd0c00\") " pod="openstack/barbican-keystone-listener-86fc6cb5d-5rbfc" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.338198 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b447c315-6a28-4a18-af48-fbcf84cd0c00-combined-ca-bundle\") pod \"barbican-keystone-listener-86fc6cb5d-5rbfc\" (UID: \"b447c315-6a28-4a18-af48-fbcf84cd0c00\") " pod="openstack/barbican-keystone-listener-86fc6cb5d-5rbfc" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.340350 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-44hvs\" (UniqueName: \"kubernetes.io/projected/4100cddd-df77-4b8a-af0c-746bbd98c80f-kube-api-access-44hvs\") pod \"barbican-worker-545bd7b455-5w47r\" (UID: \"4100cddd-df77-4b8a-af0c-746bbd98c80f\") " pod="openstack/barbican-worker-545bd7b455-5w47r" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.345365 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4100cddd-df77-4b8a-af0c-746bbd98c80f-config-data-custom\") pod \"barbican-worker-545bd7b455-5w47r\" (UID: \"4100cddd-df77-4b8a-af0c-746bbd98c80f\") " pod="openstack/barbican-worker-545bd7b455-5w47r" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.406066 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21-dns-swift-storage-0\") pod \"dnsmasq-dns-75c8ddd69c-5xwvl\" (UID: \"2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21\") " pod="openstack/dnsmasq-dns-75c8ddd69c-5xwvl" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.406145 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21-dns-svc\") pod \"dnsmasq-dns-75c8ddd69c-5xwvl\" (UID: \"2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21\") " pod="openstack/dnsmasq-dns-75c8ddd69c-5xwvl" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.406189 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21-ovsdbserver-nb\") pod \"dnsmasq-dns-75c8ddd69c-5xwvl\" (UID: \"2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21\") " pod="openstack/dnsmasq-dns-75c8ddd69c-5xwvl" Nov 24 01:29:12 crc 
kubenswrapper[4755]: I1124 01:29:12.406222 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21-ovsdbserver-sb\") pod \"dnsmasq-dns-75c8ddd69c-5xwvl\" (UID: \"2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21\") " pod="openstack/dnsmasq-dns-75c8ddd69c-5xwvl" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.406253 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mqrm5\" (UniqueName: \"kubernetes.io/projected/2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21-kube-api-access-mqrm5\") pod \"dnsmasq-dns-75c8ddd69c-5xwvl\" (UID: \"2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21\") " pod="openstack/dnsmasq-dns-75c8ddd69c-5xwvl" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.406290 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21-config\") pod \"dnsmasq-dns-75c8ddd69c-5xwvl\" (UID: \"2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21\") " pod="openstack/dnsmasq-dns-75c8ddd69c-5xwvl" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.453377 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-545bd7b455-5w47r" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.461175 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-86fc6cb5d-5rbfc" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.464769 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-74db9f6546-bvfjf"] Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.466851 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-74db9f6546-bvfjf" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.471557 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.507564 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21-dns-svc\") pod \"dnsmasq-dns-75c8ddd69c-5xwvl\" (UID: \"2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21\") " pod="openstack/dnsmasq-dns-75c8ddd69c-5xwvl" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.507650 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21-ovsdbserver-nb\") pod \"dnsmasq-dns-75c8ddd69c-5xwvl\" (UID: \"2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21\") " pod="openstack/dnsmasq-dns-75c8ddd69c-5xwvl" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.507690 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21-ovsdbserver-sb\") pod \"dnsmasq-dns-75c8ddd69c-5xwvl\" (UID: \"2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21\") " pod="openstack/dnsmasq-dns-75c8ddd69c-5xwvl" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.507724 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mqrm5\" (UniqueName: \"kubernetes.io/projected/2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21-kube-api-access-mqrm5\") pod \"dnsmasq-dns-75c8ddd69c-5xwvl\" (UID: \"2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21\") " pod="openstack/dnsmasq-dns-75c8ddd69c-5xwvl" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.507771 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21-config\") pod \"dnsmasq-dns-75c8ddd69c-5xwvl\" (UID: \"2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21\") " pod="openstack/dnsmasq-dns-75c8ddd69c-5xwvl" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.507809 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21-dns-swift-storage-0\") pod \"dnsmasq-dns-75c8ddd69c-5xwvl\" (UID: \"2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21\") " pod="openstack/dnsmasq-dns-75c8ddd69c-5xwvl" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.509649 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-74db9f6546-bvfjf"] Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.510211 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21-config\") pod \"dnsmasq-dns-75c8ddd69c-5xwvl\" (UID: \"2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21\") " pod="openstack/dnsmasq-dns-75c8ddd69c-5xwvl" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.511910 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21-dns-swift-storage-0\") pod \"dnsmasq-dns-75c8ddd69c-5xwvl\" (UID: \"2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21\") " pod="openstack/dnsmasq-dns-75c8ddd69c-5xwvl" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.514268 
4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21-dns-svc\") pod \"dnsmasq-dns-75c8ddd69c-5xwvl\" (UID: \"2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21\") " pod="openstack/dnsmasq-dns-75c8ddd69c-5xwvl" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.515344 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21-ovsdbserver-nb\") pod \"dnsmasq-dns-75c8ddd69c-5xwvl\" (UID: \"2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21\") " pod="openstack/dnsmasq-dns-75c8ddd69c-5xwvl" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.546152 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21-ovsdbserver-sb\") pod \"dnsmasq-dns-75c8ddd69c-5xwvl\" (UID: \"2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21\") " pod="openstack/dnsmasq-dns-75c8ddd69c-5xwvl" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.562369 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mqrm5\" (UniqueName: \"kubernetes.io/projected/2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21-kube-api-access-mqrm5\") pod \"dnsmasq-dns-75c8ddd69c-5xwvl\" (UID: \"2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21\") " pod="openstack/dnsmasq-dns-75c8ddd69c-5xwvl" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.614286 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3431a799-d534-4932-8dad-1d2e49a74737-logs\") pod \"barbican-api-74db9f6546-bvfjf\" (UID: \"3431a799-d534-4932-8dad-1d2e49a74737\") " pod="openstack/barbican-api-74db9f6546-bvfjf" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.614620 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3431a799-d534-4932-8dad-1d2e49a74737-config-data-custom\") pod \"barbican-api-74db9f6546-bvfjf\" (UID: \"3431a799-d534-4932-8dad-1d2e49a74737\") " pod="openstack/barbican-api-74db9f6546-bvfjf" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.614654 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3431a799-d534-4932-8dad-1d2e49a74737-config-data\") pod \"barbican-api-74db9f6546-bvfjf\" (UID: \"3431a799-d534-4932-8dad-1d2e49a74737\") " pod="openstack/barbican-api-74db9f6546-bvfjf" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.614726 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-crxp8\" (UniqueName: \"kubernetes.io/projected/3431a799-d534-4932-8dad-1d2e49a74737-kube-api-access-crxp8\") pod \"barbican-api-74db9f6546-bvfjf\" (UID: \"3431a799-d534-4932-8dad-1d2e49a74737\") " pod="openstack/barbican-api-74db9f6546-bvfjf" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.614749 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3431a799-d534-4932-8dad-1d2e49a74737-combined-ca-bundle\") pod \"barbican-api-74db9f6546-bvfjf\" (UID: \"3431a799-d534-4932-8dad-1d2e49a74737\") " pod="openstack/barbican-api-74db9f6546-bvfjf" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 
01:29:12.661136 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-5xwvl" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.717575 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3431a799-d534-4932-8dad-1d2e49a74737-config-data-custom\") pod \"barbican-api-74db9f6546-bvfjf\" (UID: \"3431a799-d534-4932-8dad-1d2e49a74737\") " pod="openstack/barbican-api-74db9f6546-bvfjf" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.717672 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3431a799-d534-4932-8dad-1d2e49a74737-config-data\") pod \"barbican-api-74db9f6546-bvfjf\" (UID: \"3431a799-d534-4932-8dad-1d2e49a74737\") " pod="openstack/barbican-api-74db9f6546-bvfjf" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.717743 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-crxp8\" (UniqueName: \"kubernetes.io/projected/3431a799-d534-4932-8dad-1d2e49a74737-kube-api-access-crxp8\") pod \"barbican-api-74db9f6546-bvfjf\" (UID: \"3431a799-d534-4932-8dad-1d2e49a74737\") " pod="openstack/barbican-api-74db9f6546-bvfjf" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.717761 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3431a799-d534-4932-8dad-1d2e49a74737-combined-ca-bundle\") pod \"barbican-api-74db9f6546-bvfjf\" (UID: \"3431a799-d534-4932-8dad-1d2e49a74737\") " pod="openstack/barbican-api-74db9f6546-bvfjf" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.717788 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3431a799-d534-4932-8dad-1d2e49a74737-logs\") pod \"barbican-api-74db9f6546-bvfjf\" (UID: \"3431a799-d534-4932-8dad-1d2e49a74737\") " pod="openstack/barbican-api-74db9f6546-bvfjf" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.718193 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3431a799-d534-4932-8dad-1d2e49a74737-logs\") pod \"barbican-api-74db9f6546-bvfjf\" (UID: \"3431a799-d534-4932-8dad-1d2e49a74737\") " pod="openstack/barbican-api-74db9f6546-bvfjf" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.735490 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3431a799-d534-4932-8dad-1d2e49a74737-config-data-custom\") pod \"barbican-api-74db9f6546-bvfjf\" (UID: \"3431a799-d534-4932-8dad-1d2e49a74737\") " pod="openstack/barbican-api-74db9f6546-bvfjf" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.738179 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3431a799-d534-4932-8dad-1d2e49a74737-config-data\") pod \"barbican-api-74db9f6546-bvfjf\" (UID: \"3431a799-d534-4932-8dad-1d2e49a74737\") " pod="openstack/barbican-api-74db9f6546-bvfjf" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.742230 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3431a799-d534-4932-8dad-1d2e49a74737-combined-ca-bundle\") pod \"barbican-api-74db9f6546-bvfjf\" (UID: \"3431a799-d534-4932-8dad-1d2e49a74737\") " 
pod="openstack/barbican-api-74db9f6546-bvfjf" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.761364 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-crxp8\" (UniqueName: \"kubernetes.io/projected/3431a799-d534-4932-8dad-1d2e49a74737-kube-api-access-crxp8\") pod \"barbican-api-74db9f6546-bvfjf\" (UID: \"3431a799-d534-4932-8dad-1d2e49a74737\") " pod="openstack/barbican-api-74db9f6546-bvfjf" Nov 24 01:29:12 crc kubenswrapper[4755]: I1124 01:29:12.814063 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-74db9f6546-bvfjf" Nov 24 01:29:13 crc kubenswrapper[4755]: I1124 01:29:13.160429 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-86fc6cb5d-5rbfc"] Nov 24 01:29:13 crc kubenswrapper[4755]: W1124 01:29:13.179199 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb447c315_6a28_4a18_af48_fbcf84cd0c00.slice/crio-936496a477d2496e52411cdf43aa76ee019feedca92b970803c13393757299ac WatchSource:0}: Error finding container 936496a477d2496e52411cdf43aa76ee019feedca92b970803c13393757299ac: Status 404 returned error can't find the container with id 936496a477d2496e52411cdf43aa76ee019feedca92b970803c13393757299ac Nov 24 01:29:13 crc kubenswrapper[4755]: I1124 01:29:13.216631 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-86fc6cb5d-5rbfc" event={"ID":"b447c315-6a28-4a18-af48-fbcf84cd0c00","Type":"ContainerStarted","Data":"936496a477d2496e52411cdf43aa76ee019feedca92b970803c13393757299ac"} Nov 24 01:29:13 crc kubenswrapper[4755]: I1124 01:29:13.312714 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-545bd7b455-5w47r"] Nov 24 01:29:13 crc kubenswrapper[4755]: W1124 01:29:13.322160 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4100cddd_df77_4b8a_af0c_746bbd98c80f.slice/crio-db44004c0f7964254e183f7a2c0796aedcb75841a052d9f3fcbf039ee1b095fa WatchSource:0}: Error finding container db44004c0f7964254e183f7a2c0796aedcb75841a052d9f3fcbf039ee1b095fa: Status 404 returned error can't find the container with id db44004c0f7964254e183f7a2c0796aedcb75841a052d9f3fcbf039ee1b095fa Nov 24 01:29:13 crc kubenswrapper[4755]: I1124 01:29:13.456426 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-5xwvl"] Nov 24 01:29:13 crc kubenswrapper[4755]: W1124 01:29:13.460628 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2cd4ba5f_a6b8_4ccd_9053_5bc65a366e21.slice/crio-37512cf43e6678242b8230c766cd97b89a35fb63b23f34d7b92d3301ace34457 WatchSource:0}: Error finding container 37512cf43e6678242b8230c766cd97b89a35fb63b23f34d7b92d3301ace34457: Status 404 returned error can't find the container with id 37512cf43e6678242b8230c766cd97b89a35fb63b23f34d7b92d3301ace34457 Nov 24 01:29:13 crc kubenswrapper[4755]: I1124 01:29:13.475902 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-74db9f6546-bvfjf"] Nov 24 01:29:13 crc kubenswrapper[4755]: W1124 01:29:13.501096 4755 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3431a799_d534_4932_8dad_1d2e49a74737.slice/crio-d1ae01f482282900ff8faadb5917d479255e042fd079b50fc0c9745060d8ac35 WatchSource:0}: Error finding container d1ae01f482282900ff8faadb5917d479255e042fd079b50fc0c9745060d8ac35: Status 404 returned error can't find the container with id d1ae01f482282900ff8faadb5917d479255e042fd079b50fc0c9745060d8ac35 Nov 24 01:29:13 crc kubenswrapper[4755]: I1124 01:29:13.789107 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Nov 24 01:29:13 crc kubenswrapper[4755]: I1124 01:29:13.789343 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Nov 24 01:29:13 crc kubenswrapper[4755]: I1124 01:29:13.789353 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Nov 24 01:29:13 crc kubenswrapper[4755]: I1124 01:29:13.789362 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Nov 24 01:29:13 crc kubenswrapper[4755]: I1124 01:29:13.826365 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Nov 24 01:29:13 crc kubenswrapper[4755]: I1124 01:29:13.840110 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Nov 24 01:29:14 crc kubenswrapper[4755]: I1124 01:29:14.229599 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-74db9f6546-bvfjf" event={"ID":"3431a799-d534-4932-8dad-1d2e49a74737","Type":"ContainerStarted","Data":"aeaf273448fbf66b722ad8b0293f2b8c3db5a12c2ebcf4b4fb4b1cc41b2e2376"} Nov 24 01:29:14 crc kubenswrapper[4755]: I1124 01:29:14.229656 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-74db9f6546-bvfjf" event={"ID":"3431a799-d534-4932-8dad-1d2e49a74737","Type":"ContainerStarted","Data":"90d95af224e73d08f34b63b67fcaaff30b00be6e3dc0ac0c80c995f916d60e3c"} Nov 24 01:29:14 crc kubenswrapper[4755]: I1124 01:29:14.229668 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-74db9f6546-bvfjf" event={"ID":"3431a799-d534-4932-8dad-1d2e49a74737","Type":"ContainerStarted","Data":"d1ae01f482282900ff8faadb5917d479255e042fd079b50fc0c9745060d8ac35"} Nov 24 01:29:14 crc kubenswrapper[4755]: I1124 01:29:14.229960 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-74db9f6546-bvfjf" Nov 24 01:29:14 crc kubenswrapper[4755]: I1124 01:29:14.229998 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-74db9f6546-bvfjf" Nov 24 01:29:14 crc kubenswrapper[4755]: I1124 01:29:14.231892 4755 generic.go:334] "Generic (PLEG): container finished" podID="2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21" containerID="143f346afc30989f3a1a8234b200dbb402a93e3e31e8619f3ba263f982114bad" exitCode=0 Nov 24 01:29:14 crc kubenswrapper[4755]: I1124 01:29:14.231935 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-5xwvl" event={"ID":"2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21","Type":"ContainerDied","Data":"143f346afc30989f3a1a8234b200dbb402a93e3e31e8619f3ba263f982114bad"} Nov 24 01:29:14 crc kubenswrapper[4755]: I1124 01:29:14.231977 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-5xwvl" 
event={"ID":"2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21","Type":"ContainerStarted","Data":"37512cf43e6678242b8230c766cd97b89a35fb63b23f34d7b92d3301ace34457"} Nov 24 01:29:14 crc kubenswrapper[4755]: I1124 01:29:14.234897 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-545bd7b455-5w47r" event={"ID":"4100cddd-df77-4b8a-af0c-746bbd98c80f","Type":"ContainerStarted","Data":"db44004c0f7964254e183f7a2c0796aedcb75841a052d9f3fcbf039ee1b095fa"} Nov 24 01:29:14 crc kubenswrapper[4755]: I1124 01:29:14.259292 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-74db9f6546-bvfjf" podStartSLOduration=2.259270826 podStartE2EDuration="2.259270826s" podCreationTimestamp="2025-11-24 01:29:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:29:14.255158253 +0000 UTC m=+978.941223764" watchObservedRunningTime="2025-11-24 01:29:14.259270826 +0000 UTC m=+978.945336327" Nov 24 01:29:15 crc kubenswrapper[4755]: I1124 01:29:15.124875 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-64586f69c8-7phjt"] Nov 24 01:29:15 crc kubenswrapper[4755]: I1124 01:29:15.126596 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-64586f69c8-7phjt" Nov 24 01:29:15 crc kubenswrapper[4755]: I1124 01:29:15.130159 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Nov 24 01:29:15 crc kubenswrapper[4755]: I1124 01:29:15.132846 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Nov 24 01:29:15 crc kubenswrapper[4755]: I1124 01:29:15.145140 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-64586f69c8-7phjt"] Nov 24 01:29:15 crc kubenswrapper[4755]: I1124 01:29:15.183450 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f51ca687-cad4-4e48-bb34-1fd95c8bf47d-config-data\") pod \"barbican-api-64586f69c8-7phjt\" (UID: \"f51ca687-cad4-4e48-bb34-1fd95c8bf47d\") " pod="openstack/barbican-api-64586f69c8-7phjt" Nov 24 01:29:15 crc kubenswrapper[4755]: I1124 01:29:15.183538 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f51ca687-cad4-4e48-bb34-1fd95c8bf47d-config-data-custom\") pod \"barbican-api-64586f69c8-7phjt\" (UID: \"f51ca687-cad4-4e48-bb34-1fd95c8bf47d\") " pod="openstack/barbican-api-64586f69c8-7phjt" Nov 24 01:29:15 crc kubenswrapper[4755]: I1124 01:29:15.183589 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-56vxs\" (UniqueName: \"kubernetes.io/projected/f51ca687-cad4-4e48-bb34-1fd95c8bf47d-kube-api-access-56vxs\") pod \"barbican-api-64586f69c8-7phjt\" (UID: \"f51ca687-cad4-4e48-bb34-1fd95c8bf47d\") " pod="openstack/barbican-api-64586f69c8-7phjt" Nov 24 01:29:15 crc kubenswrapper[4755]: I1124 01:29:15.183653 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f51ca687-cad4-4e48-bb34-1fd95c8bf47d-public-tls-certs\") pod \"barbican-api-64586f69c8-7phjt\" (UID: \"f51ca687-cad4-4e48-bb34-1fd95c8bf47d\") " pod="openstack/barbican-api-64586f69c8-7phjt" Nov 24 
01:29:15 crc kubenswrapper[4755]: I1124 01:29:15.183714 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f51ca687-cad4-4e48-bb34-1fd95c8bf47d-logs\") pod \"barbican-api-64586f69c8-7phjt\" (UID: \"f51ca687-cad4-4e48-bb34-1fd95c8bf47d\") " pod="openstack/barbican-api-64586f69c8-7phjt" Nov 24 01:29:15 crc kubenswrapper[4755]: I1124 01:29:15.183739 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f51ca687-cad4-4e48-bb34-1fd95c8bf47d-combined-ca-bundle\") pod \"barbican-api-64586f69c8-7phjt\" (UID: \"f51ca687-cad4-4e48-bb34-1fd95c8bf47d\") " pod="openstack/barbican-api-64586f69c8-7phjt" Nov 24 01:29:15 crc kubenswrapper[4755]: I1124 01:29:15.183758 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f51ca687-cad4-4e48-bb34-1fd95c8bf47d-internal-tls-certs\") pod \"barbican-api-64586f69c8-7phjt\" (UID: \"f51ca687-cad4-4e48-bb34-1fd95c8bf47d\") " pod="openstack/barbican-api-64586f69c8-7phjt" Nov 24 01:29:15 crc kubenswrapper[4755]: I1124 01:29:15.285547 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f51ca687-cad4-4e48-bb34-1fd95c8bf47d-config-data\") pod \"barbican-api-64586f69c8-7phjt\" (UID: \"f51ca687-cad4-4e48-bb34-1fd95c8bf47d\") " pod="openstack/barbican-api-64586f69c8-7phjt" Nov 24 01:29:15 crc kubenswrapper[4755]: I1124 01:29:15.285676 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f51ca687-cad4-4e48-bb34-1fd95c8bf47d-config-data-custom\") pod \"barbican-api-64586f69c8-7phjt\" (UID: \"f51ca687-cad4-4e48-bb34-1fd95c8bf47d\") " pod="openstack/barbican-api-64586f69c8-7phjt" Nov 24 01:29:15 crc kubenswrapper[4755]: I1124 01:29:15.285735 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-56vxs\" (UniqueName: \"kubernetes.io/projected/f51ca687-cad4-4e48-bb34-1fd95c8bf47d-kube-api-access-56vxs\") pod \"barbican-api-64586f69c8-7phjt\" (UID: \"f51ca687-cad4-4e48-bb34-1fd95c8bf47d\") " pod="openstack/barbican-api-64586f69c8-7phjt" Nov 24 01:29:15 crc kubenswrapper[4755]: I1124 01:29:15.285793 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f51ca687-cad4-4e48-bb34-1fd95c8bf47d-public-tls-certs\") pod \"barbican-api-64586f69c8-7phjt\" (UID: \"f51ca687-cad4-4e48-bb34-1fd95c8bf47d\") " pod="openstack/barbican-api-64586f69c8-7phjt" Nov 24 01:29:15 crc kubenswrapper[4755]: I1124 01:29:15.285867 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f51ca687-cad4-4e48-bb34-1fd95c8bf47d-logs\") pod \"barbican-api-64586f69c8-7phjt\" (UID: \"f51ca687-cad4-4e48-bb34-1fd95c8bf47d\") " pod="openstack/barbican-api-64586f69c8-7phjt" Nov 24 01:29:15 crc kubenswrapper[4755]: I1124 01:29:15.285893 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f51ca687-cad4-4e48-bb34-1fd95c8bf47d-combined-ca-bundle\") pod \"barbican-api-64586f69c8-7phjt\" (UID: \"f51ca687-cad4-4e48-bb34-1fd95c8bf47d\") " pod="openstack/barbican-api-64586f69c8-7phjt" 
Nov 24 01:29:15 crc kubenswrapper[4755]: I1124 01:29:15.285911 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f51ca687-cad4-4e48-bb34-1fd95c8bf47d-internal-tls-certs\") pod \"barbican-api-64586f69c8-7phjt\" (UID: \"f51ca687-cad4-4e48-bb34-1fd95c8bf47d\") " pod="openstack/barbican-api-64586f69c8-7phjt" Nov 24 01:29:15 crc kubenswrapper[4755]: I1124 01:29:15.289073 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f51ca687-cad4-4e48-bb34-1fd95c8bf47d-logs\") pod \"barbican-api-64586f69c8-7phjt\" (UID: \"f51ca687-cad4-4e48-bb34-1fd95c8bf47d\") " pod="openstack/barbican-api-64586f69c8-7phjt" Nov 24 01:29:15 crc kubenswrapper[4755]: I1124 01:29:15.292622 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f51ca687-cad4-4e48-bb34-1fd95c8bf47d-config-data\") pod \"barbican-api-64586f69c8-7phjt\" (UID: \"f51ca687-cad4-4e48-bb34-1fd95c8bf47d\") " pod="openstack/barbican-api-64586f69c8-7phjt" Nov 24 01:29:15 crc kubenswrapper[4755]: I1124 01:29:15.296002 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f51ca687-cad4-4e48-bb34-1fd95c8bf47d-combined-ca-bundle\") pod \"barbican-api-64586f69c8-7phjt\" (UID: \"f51ca687-cad4-4e48-bb34-1fd95c8bf47d\") " pod="openstack/barbican-api-64586f69c8-7phjt" Nov 24 01:29:15 crc kubenswrapper[4755]: I1124 01:29:15.299849 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f51ca687-cad4-4e48-bb34-1fd95c8bf47d-public-tls-certs\") pod \"barbican-api-64586f69c8-7phjt\" (UID: \"f51ca687-cad4-4e48-bb34-1fd95c8bf47d\") " pod="openstack/barbican-api-64586f69c8-7phjt" Nov 24 01:29:15 crc kubenswrapper[4755]: I1124 01:29:15.305134 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f51ca687-cad4-4e48-bb34-1fd95c8bf47d-internal-tls-certs\") pod \"barbican-api-64586f69c8-7phjt\" (UID: \"f51ca687-cad4-4e48-bb34-1fd95c8bf47d\") " pod="openstack/barbican-api-64586f69c8-7phjt" Nov 24 01:29:15 crc kubenswrapper[4755]: I1124 01:29:15.309066 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-56vxs\" (UniqueName: \"kubernetes.io/projected/f51ca687-cad4-4e48-bb34-1fd95c8bf47d-kube-api-access-56vxs\") pod \"barbican-api-64586f69c8-7phjt\" (UID: \"f51ca687-cad4-4e48-bb34-1fd95c8bf47d\") " pod="openstack/barbican-api-64586f69c8-7phjt" Nov 24 01:29:15 crc kubenswrapper[4755]: I1124 01:29:15.323168 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f51ca687-cad4-4e48-bb34-1fd95c8bf47d-config-data-custom\") pod \"barbican-api-64586f69c8-7phjt\" (UID: \"f51ca687-cad4-4e48-bb34-1fd95c8bf47d\") " pod="openstack/barbican-api-64586f69c8-7phjt" Nov 24 01:29:15 crc kubenswrapper[4755]: I1124 01:29:15.474977 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-64586f69c8-7phjt" Nov 24 01:29:16 crc kubenswrapper[4755]: I1124 01:29:16.014517 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-64586f69c8-7phjt"] Nov 24 01:29:16 crc kubenswrapper[4755]: I1124 01:29:16.280482 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-5xwvl" event={"ID":"2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21","Type":"ContainerStarted","Data":"94d632be6382cb5836e89b418047b89ca951ebfa9566582dfdc70d9595841609"} Nov 24 01:29:16 crc kubenswrapper[4755]: I1124 01:29:16.281650 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-75c8ddd69c-5xwvl" Nov 24 01:29:16 crc kubenswrapper[4755]: I1124 01:29:16.292314 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-545bd7b455-5w47r" event={"ID":"4100cddd-df77-4b8a-af0c-746bbd98c80f","Type":"ContainerStarted","Data":"69f00adb3040595982423be2f80d3f05d0b611c44bd07983fd3be06177382264"} Nov 24 01:29:16 crc kubenswrapper[4755]: I1124 01:29:16.292370 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-545bd7b455-5w47r" event={"ID":"4100cddd-df77-4b8a-af0c-746bbd98c80f","Type":"ContainerStarted","Data":"6cdbcf9a80b7b6b185927b8b703536eee1b80acec5fe8419255cf5b5287a710e"} Nov 24 01:29:16 crc kubenswrapper[4755]: I1124 01:29:16.298578 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-86fc6cb5d-5rbfc" event={"ID":"b447c315-6a28-4a18-af48-fbcf84cd0c00","Type":"ContainerStarted","Data":"ec2e33f52fa56eb29965f70b8280815edab7d9782a80881a42c5cabf0a5c66ca"} Nov 24 01:29:16 crc kubenswrapper[4755]: I1124 01:29:16.298834 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-86fc6cb5d-5rbfc" event={"ID":"b447c315-6a28-4a18-af48-fbcf84cd0c00","Type":"ContainerStarted","Data":"5ce865a747f01ed573b22a02c0b112f0788a43420080a5362f6c78a231cc4ad8"} Nov 24 01:29:16 crc kubenswrapper[4755]: I1124 01:29:16.309333 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-75c8ddd69c-5xwvl" podStartSLOduration=4.30930321 podStartE2EDuration="4.30930321s" podCreationTimestamp="2025-11-24 01:29:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:29:16.301644448 +0000 UTC m=+980.987709949" watchObservedRunningTime="2025-11-24 01:29:16.30930321 +0000 UTC m=+980.995368711" Nov 24 01:29:16 crc kubenswrapper[4755]: I1124 01:29:16.316437 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-64586f69c8-7phjt" event={"ID":"f51ca687-cad4-4e48-bb34-1fd95c8bf47d","Type":"ContainerStarted","Data":"26be85a94601f075b426a591b686f738d42c7e1bed667043f470ab26137394cf"} Nov 24 01:29:16 crc kubenswrapper[4755]: I1124 01:29:16.316480 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-64586f69c8-7phjt" event={"ID":"f51ca687-cad4-4e48-bb34-1fd95c8bf47d","Type":"ContainerStarted","Data":"7e782fea17ae7f4ffea90651ee8df29d6fa336ebf17ba744c1330cb58a0c26cb"} Nov 24 01:29:16 crc kubenswrapper[4755]: I1124 01:29:16.323189 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-545bd7b455-5w47r" podStartSLOduration=2.279594328 podStartE2EDuration="4.323154072s" podCreationTimestamp="2025-11-24 01:29:12 +0000 UTC" 
firstStartedPulling="2025-11-24 01:29:13.324755792 +0000 UTC m=+978.010821293" lastFinishedPulling="2025-11-24 01:29:15.368315536 +0000 UTC m=+980.054381037" observedRunningTime="2025-11-24 01:29:16.318851113 +0000 UTC m=+981.004916614" watchObservedRunningTime="2025-11-24 01:29:16.323154072 +0000 UTC m=+981.009219573" Nov 24 01:29:16 crc kubenswrapper[4755]: I1124 01:29:16.337265 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-86fc6cb5d-5rbfc" podStartSLOduration=2.185702928 podStartE2EDuration="4.337250391s" podCreationTimestamp="2025-11-24 01:29:12 +0000 UTC" firstStartedPulling="2025-11-24 01:29:13.182043685 +0000 UTC m=+977.868109186" lastFinishedPulling="2025-11-24 01:29:15.333591148 +0000 UTC m=+980.019656649" observedRunningTime="2025-11-24 01:29:16.335889973 +0000 UTC m=+981.021955474" watchObservedRunningTime="2025-11-24 01:29:16.337250391 +0000 UTC m=+981.023315892" Nov 24 01:29:16 crc kubenswrapper[4755]: I1124 01:29:16.556792 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Nov 24 01:29:16 crc kubenswrapper[4755]: I1124 01:29:16.557258 4755 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 24 01:29:16 crc kubenswrapper[4755]: I1124 01:29:16.707060 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Nov 24 01:29:17 crc kubenswrapper[4755]: I1124 01:29:17.342705 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-64586f69c8-7phjt" event={"ID":"f51ca687-cad4-4e48-bb34-1fd95c8bf47d","Type":"ContainerStarted","Data":"dc78f2b815fb230afc2390a0d12f2812c7f761ef270dcf5b7ea3f3f38e146a7f"} Nov 24 01:29:17 crc kubenswrapper[4755]: I1124 01:29:17.344386 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-64586f69c8-7phjt" Nov 24 01:29:17 crc kubenswrapper[4755]: I1124 01:29:17.344429 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-64586f69c8-7phjt" Nov 24 01:29:17 crc kubenswrapper[4755]: I1124 01:29:17.351657 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-rhwbp" event={"ID":"aea62103-9b85-495d-bb71-3c69c02a3000","Type":"ContainerStarted","Data":"65e6aaee4313f1eabcd359c80695c88849bb98db23790e80c527eb075b29b021"} Nov 24 01:29:17 crc kubenswrapper[4755]: I1124 01:29:17.379457 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-64586f69c8-7phjt" podStartSLOduration=2.379436116 podStartE2EDuration="2.379436116s" podCreationTimestamp="2025-11-24 01:29:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:29:17.368565606 +0000 UTC m=+982.054631137" watchObservedRunningTime="2025-11-24 01:29:17.379436116 +0000 UTC m=+982.065501617" Nov 24 01:29:17 crc kubenswrapper[4755]: I1124 01:29:17.393754 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-rhwbp" podStartSLOduration=3.428126463 podStartE2EDuration="49.393668809s" podCreationTimestamp="2025-11-24 01:28:28 +0000 UTC" firstStartedPulling="2025-11-24 01:28:29.50496489 +0000 UTC m=+934.191030391" lastFinishedPulling="2025-11-24 01:29:15.470507236 +0000 UTC m=+980.156572737" observedRunningTime="2025-11-24 01:29:17.390682486 +0000 UTC m=+982.076747987" 
watchObservedRunningTime="2025-11-24 01:29:17.393668809 +0000 UTC m=+982.079734310" Nov 24 01:29:18 crc kubenswrapper[4755]: I1124 01:29:18.260885 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-55cf755d8-2cns2" podUID="dc7447cb-d4c3-48d1-8cd3-065d5eee21cb" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.148:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.148:8443: connect: connection refused" Nov 24 01:29:18 crc kubenswrapper[4755]: I1124 01:29:18.358785 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-75d8fb7cd4-vbxkn" podUID="5d176bdd-fe2f-4ed0-a930-2a6ae568b400" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.149:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.149:8443: connect: connection refused" Nov 24 01:29:21 crc kubenswrapper[4755]: E1124 01:29:21.332373 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceilometer-central-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ceilometer-0" podUID="a7e96f37-574f-4900-88f9-33dc41179807" Nov 24 01:29:21 crc kubenswrapper[4755]: I1124 01:29:21.396140 4755 generic.go:334] "Generic (PLEG): container finished" podID="aea62103-9b85-495d-bb71-3c69c02a3000" containerID="65e6aaee4313f1eabcd359c80695c88849bb98db23790e80c527eb075b29b021" exitCode=0 Nov 24 01:29:21 crc kubenswrapper[4755]: I1124 01:29:21.396219 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-rhwbp" event={"ID":"aea62103-9b85-495d-bb71-3c69c02a3000","Type":"ContainerDied","Data":"65e6aaee4313f1eabcd359c80695c88849bb98db23790e80c527eb075b29b021"} Nov 24 01:29:21 crc kubenswrapper[4755]: I1124 01:29:21.408496 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7e96f37-574f-4900-88f9-33dc41179807","Type":"ContainerStarted","Data":"e62d17590102a4335196ac2c1b11789315c835ac571282ec3ef7c06495a87c30"} Nov 24 01:29:21 crc kubenswrapper[4755]: I1124 01:29:21.408727 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a7e96f37-574f-4900-88f9-33dc41179807" containerName="ceilometer-notification-agent" containerID="cri-o://f9e6e8f3d6be6e25c707568e1e71cb08d1ef2b9bc56285251a356b84cfbe3a19" gracePeriod=30 Nov 24 01:29:21 crc kubenswrapper[4755]: I1124 01:29:21.408763 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 24 01:29:21 crc kubenswrapper[4755]: I1124 01:29:21.408808 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a7e96f37-574f-4900-88f9-33dc41179807" containerName="proxy-httpd" containerID="cri-o://e62d17590102a4335196ac2c1b11789315c835ac571282ec3ef7c06495a87c30" gracePeriod=30 Nov 24 01:29:21 crc kubenswrapper[4755]: I1124 01:29:21.408879 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a7e96f37-574f-4900-88f9-33dc41179807" containerName="sg-core" containerID="cri-o://32ef1e5e5d2a7788aad23e607b6e88ccd77e35194ff72e7c65a23a13a8460145" gracePeriod=30 Nov 24 01:29:22 crc kubenswrapper[4755]: I1124 01:29:22.434656 4755 generic.go:334] "Generic (PLEG): container finished" podID="a7e96f37-574f-4900-88f9-33dc41179807" containerID="32ef1e5e5d2a7788aad23e607b6e88ccd77e35194ff72e7c65a23a13a8460145" exitCode=2 Nov 24 01:29:22 crc 
kubenswrapper[4755]: I1124 01:29:22.435162 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7e96f37-574f-4900-88f9-33dc41179807","Type":"ContainerDied","Data":"32ef1e5e5d2a7788aad23e607b6e88ccd77e35194ff72e7c65a23a13a8460145"} Nov 24 01:29:22 crc kubenswrapper[4755]: I1124 01:29:22.664759 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-75c8ddd69c-5xwvl" Nov 24 01:29:23 crc kubenswrapper[4755]: I1124 01:29:23.494416 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-22wt6"] Nov 24 01:29:23 crc kubenswrapper[4755]: I1124 01:29:23.495462 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-84b966f6c9-22wt6" podUID="93836cb5-1416-48b2-bd66-984b5a90ee2b" containerName="dnsmasq-dns" containerID="cri-o://beca6bf68635c9537a4511d9043983ed5b86ead16de53b5e27fd39917c3838dd" gracePeriod=10 Nov 24 01:29:23 crc kubenswrapper[4755]: I1124 01:29:23.606953 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-rhwbp" Nov 24 01:29:23 crc kubenswrapper[4755]: I1124 01:29:23.706636 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pdb5j\" (UniqueName: \"kubernetes.io/projected/aea62103-9b85-495d-bb71-3c69c02a3000-kube-api-access-pdb5j\") pod \"aea62103-9b85-495d-bb71-3c69c02a3000\" (UID: \"aea62103-9b85-495d-bb71-3c69c02a3000\") " Nov 24 01:29:23 crc kubenswrapper[4755]: I1124 01:29:23.706673 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aea62103-9b85-495d-bb71-3c69c02a3000-combined-ca-bundle\") pod \"aea62103-9b85-495d-bb71-3c69c02a3000\" (UID: \"aea62103-9b85-495d-bb71-3c69c02a3000\") " Nov 24 01:29:23 crc kubenswrapper[4755]: I1124 01:29:23.706751 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/aea62103-9b85-495d-bb71-3c69c02a3000-db-sync-config-data\") pod \"aea62103-9b85-495d-bb71-3c69c02a3000\" (UID: \"aea62103-9b85-495d-bb71-3c69c02a3000\") " Nov 24 01:29:23 crc kubenswrapper[4755]: I1124 01:29:23.706770 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aea62103-9b85-495d-bb71-3c69c02a3000-scripts\") pod \"aea62103-9b85-495d-bb71-3c69c02a3000\" (UID: \"aea62103-9b85-495d-bb71-3c69c02a3000\") " Nov 24 01:29:23 crc kubenswrapper[4755]: I1124 01:29:23.706842 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/aea62103-9b85-495d-bb71-3c69c02a3000-etc-machine-id\") pod \"aea62103-9b85-495d-bb71-3c69c02a3000\" (UID: \"aea62103-9b85-495d-bb71-3c69c02a3000\") " Nov 24 01:29:23 crc kubenswrapper[4755]: I1124 01:29:23.706915 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aea62103-9b85-495d-bb71-3c69c02a3000-config-data\") pod \"aea62103-9b85-495d-bb71-3c69c02a3000\" (UID: \"aea62103-9b85-495d-bb71-3c69c02a3000\") " Nov 24 01:29:23 crc kubenswrapper[4755]: I1124 01:29:23.707976 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/aea62103-9b85-495d-bb71-3c69c02a3000-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod 
"aea62103-9b85-495d-bb71-3c69c02a3000" (UID: "aea62103-9b85-495d-bb71-3c69c02a3000"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 01:29:23 crc kubenswrapper[4755]: I1124 01:29:23.713714 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aea62103-9b85-495d-bb71-3c69c02a3000-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "aea62103-9b85-495d-bb71-3c69c02a3000" (UID: "aea62103-9b85-495d-bb71-3c69c02a3000"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:29:23 crc kubenswrapper[4755]: I1124 01:29:23.714226 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aea62103-9b85-495d-bb71-3c69c02a3000-kube-api-access-pdb5j" (OuterVolumeSpecName: "kube-api-access-pdb5j") pod "aea62103-9b85-495d-bb71-3c69c02a3000" (UID: "aea62103-9b85-495d-bb71-3c69c02a3000"). InnerVolumeSpecName "kube-api-access-pdb5j". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:29:23 crc kubenswrapper[4755]: I1124 01:29:23.717189 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aea62103-9b85-495d-bb71-3c69c02a3000-scripts" (OuterVolumeSpecName: "scripts") pod "aea62103-9b85-495d-bb71-3c69c02a3000" (UID: "aea62103-9b85-495d-bb71-3c69c02a3000"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:29:23 crc kubenswrapper[4755]: I1124 01:29:23.748404 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aea62103-9b85-495d-bb71-3c69c02a3000-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aea62103-9b85-495d-bb71-3c69c02a3000" (UID: "aea62103-9b85-495d-bb71-3c69c02a3000"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:29:23 crc kubenswrapper[4755]: I1124 01:29:23.803749 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aea62103-9b85-495d-bb71-3c69c02a3000-config-data" (OuterVolumeSpecName: "config-data") pod "aea62103-9b85-495d-bb71-3c69c02a3000" (UID: "aea62103-9b85-495d-bb71-3c69c02a3000"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:29:23 crc kubenswrapper[4755]: I1124 01:29:23.809105 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pdb5j\" (UniqueName: \"kubernetes.io/projected/aea62103-9b85-495d-bb71-3c69c02a3000-kube-api-access-pdb5j\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:23 crc kubenswrapper[4755]: I1124 01:29:23.809139 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aea62103-9b85-495d-bb71-3c69c02a3000-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:23 crc kubenswrapper[4755]: I1124 01:29:23.809149 4755 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/aea62103-9b85-495d-bb71-3c69c02a3000-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:23 crc kubenswrapper[4755]: I1124 01:29:23.809157 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aea62103-9b85-495d-bb71-3c69c02a3000-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:23 crc kubenswrapper[4755]: I1124 01:29:23.809166 4755 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/aea62103-9b85-495d-bb71-3c69c02a3000-etc-machine-id\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:23 crc kubenswrapper[4755]: I1124 01:29:23.809174 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aea62103-9b85-495d-bb71-3c69c02a3000-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.397788 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84b966f6c9-22wt6" Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.497911 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-rhwbp" Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.497909 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-rhwbp" event={"ID":"aea62103-9b85-495d-bb71-3c69c02a3000","Type":"ContainerDied","Data":"9f6967441fce78e2fd7c5c4321739bfec9f5680932a44f5e113785512d133c6f"} Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.497978 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9f6967441fce78e2fd7c5c4321739bfec9f5680932a44f5e113785512d133c6f" Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.501461 4755 generic.go:334] "Generic (PLEG): container finished" podID="93836cb5-1416-48b2-bd66-984b5a90ee2b" containerID="beca6bf68635c9537a4511d9043983ed5b86ead16de53b5e27fd39917c3838dd" exitCode=0 Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.501501 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-84b966f6c9-22wt6" Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.501519 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-22wt6" event={"ID":"93836cb5-1416-48b2-bd66-984b5a90ee2b","Type":"ContainerDied","Data":"beca6bf68635c9537a4511d9043983ed5b86ead16de53b5e27fd39917c3838dd"} Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.501568 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-22wt6" event={"ID":"93836cb5-1416-48b2-bd66-984b5a90ee2b","Type":"ContainerDied","Data":"ac9808c5c68d5327431494792d1f443928f39ddfa756808d434b4e93bb6f5de8"} Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.501599 4755 scope.go:117] "RemoveContainer" containerID="beca6bf68635c9537a4511d9043983ed5b86ead16de53b5e27fd39917c3838dd" Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.522278 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/93836cb5-1416-48b2-bd66-984b5a90ee2b-ovsdbserver-nb\") pod \"93836cb5-1416-48b2-bd66-984b5a90ee2b\" (UID: \"93836cb5-1416-48b2-bd66-984b5a90ee2b\") " Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.522428 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/93836cb5-1416-48b2-bd66-984b5a90ee2b-dns-swift-storage-0\") pod \"93836cb5-1416-48b2-bd66-984b5a90ee2b\" (UID: \"93836cb5-1416-48b2-bd66-984b5a90ee2b\") " Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.522716 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/93836cb5-1416-48b2-bd66-984b5a90ee2b-ovsdbserver-sb\") pod \"93836cb5-1416-48b2-bd66-984b5a90ee2b\" (UID: \"93836cb5-1416-48b2-bd66-984b5a90ee2b\") " Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.522789 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93836cb5-1416-48b2-bd66-984b5a90ee2b-config\") pod \"93836cb5-1416-48b2-bd66-984b5a90ee2b\" (UID: \"93836cb5-1416-48b2-bd66-984b5a90ee2b\") " Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.522850 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/93836cb5-1416-48b2-bd66-984b5a90ee2b-dns-svc\") pod \"93836cb5-1416-48b2-bd66-984b5a90ee2b\" (UID: \"93836cb5-1416-48b2-bd66-984b5a90ee2b\") " Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.522996 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kgv62\" (UniqueName: \"kubernetes.io/projected/93836cb5-1416-48b2-bd66-984b5a90ee2b-kube-api-access-kgv62\") pod \"93836cb5-1416-48b2-bd66-984b5a90ee2b\" (UID: \"93836cb5-1416-48b2-bd66-984b5a90ee2b\") " Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.523280 4755 scope.go:117] "RemoveContainer" containerID="266803f7449613c7fa6df1c32c9f0de6005c35199bc0448325b374b77e2551e5" Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.526736 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/93836cb5-1416-48b2-bd66-984b5a90ee2b-kube-api-access-kgv62" (OuterVolumeSpecName: "kube-api-access-kgv62") pod "93836cb5-1416-48b2-bd66-984b5a90ee2b" (UID: "93836cb5-1416-48b2-bd66-984b5a90ee2b"). 
InnerVolumeSpecName "kube-api-access-kgv62". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.532008 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-74db9f6546-bvfjf" Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.572297 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/93836cb5-1416-48b2-bd66-984b5a90ee2b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "93836cb5-1416-48b2-bd66-984b5a90ee2b" (UID: "93836cb5-1416-48b2-bd66-984b5a90ee2b"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.580240 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/93836cb5-1416-48b2-bd66-984b5a90ee2b-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "93836cb5-1416-48b2-bd66-984b5a90ee2b" (UID: "93836cb5-1416-48b2-bd66-984b5a90ee2b"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.583777 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/93836cb5-1416-48b2-bd66-984b5a90ee2b-config" (OuterVolumeSpecName: "config") pod "93836cb5-1416-48b2-bd66-984b5a90ee2b" (UID: "93836cb5-1416-48b2-bd66-984b5a90ee2b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.584141 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/93836cb5-1416-48b2-bd66-984b5a90ee2b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "93836cb5-1416-48b2-bd66-984b5a90ee2b" (UID: "93836cb5-1416-48b2-bd66-984b5a90ee2b"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.587165 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/93836cb5-1416-48b2-bd66-984b5a90ee2b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "93836cb5-1416-48b2-bd66-984b5a90ee2b" (UID: "93836cb5-1416-48b2-bd66-984b5a90ee2b"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.625670 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/93836cb5-1416-48b2-bd66-984b5a90ee2b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.625700 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93836cb5-1416-48b2-bd66-984b5a90ee2b-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.625709 4755 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/93836cb5-1416-48b2-bd66-984b5a90ee2b-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.625719 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kgv62\" (UniqueName: \"kubernetes.io/projected/93836cb5-1416-48b2-bd66-984b5a90ee2b-kube-api-access-kgv62\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.625731 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/93836cb5-1416-48b2-bd66-984b5a90ee2b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.625741 4755 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/93836cb5-1416-48b2-bd66-984b5a90ee2b-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.646528 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-74db9f6546-bvfjf" Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.688789 4755 scope.go:117] "RemoveContainer" containerID="beca6bf68635c9537a4511d9043983ed5b86ead16de53b5e27fd39917c3838dd" Nov 24 01:29:24 crc kubenswrapper[4755]: E1124 01:29:24.690261 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"beca6bf68635c9537a4511d9043983ed5b86ead16de53b5e27fd39917c3838dd\": container with ID starting with beca6bf68635c9537a4511d9043983ed5b86ead16de53b5e27fd39917c3838dd not found: ID does not exist" containerID="beca6bf68635c9537a4511d9043983ed5b86ead16de53b5e27fd39917c3838dd" Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.690288 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"beca6bf68635c9537a4511d9043983ed5b86ead16de53b5e27fd39917c3838dd"} err="failed to get container status \"beca6bf68635c9537a4511d9043983ed5b86ead16de53b5e27fd39917c3838dd\": rpc error: code = NotFound desc = could not find container \"beca6bf68635c9537a4511d9043983ed5b86ead16de53b5e27fd39917c3838dd\": container with ID starting with beca6bf68635c9537a4511d9043983ed5b86ead16de53b5e27fd39917c3838dd not found: ID does not exist" Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.690308 4755 scope.go:117] "RemoveContainer" containerID="266803f7449613c7fa6df1c32c9f0de6005c35199bc0448325b374b77e2551e5" Nov 24 01:29:24 crc kubenswrapper[4755]: E1124 01:29:24.690968 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"266803f7449613c7fa6df1c32c9f0de6005c35199bc0448325b374b77e2551e5\": container with ID starting with 
266803f7449613c7fa6df1c32c9f0de6005c35199bc0448325b374b77e2551e5 not found: ID does not exist" containerID="266803f7449613c7fa6df1c32c9f0de6005c35199bc0448325b374b77e2551e5" Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.690995 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"266803f7449613c7fa6df1c32c9f0de6005c35199bc0448325b374b77e2551e5"} err="failed to get container status \"266803f7449613c7fa6df1c32c9f0de6005c35199bc0448325b374b77e2551e5\": rpc error: code = NotFound desc = could not find container \"266803f7449613c7fa6df1c32c9f0de6005c35199bc0448325b374b77e2551e5\": container with ID starting with 266803f7449613c7fa6df1c32c9f0de6005c35199bc0448325b374b77e2551e5 not found: ID does not exist" Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.871648 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-22wt6"] Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.886887 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-22wt6"] Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.896780 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Nov 24 01:29:24 crc kubenswrapper[4755]: E1124 01:29:24.897178 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93836cb5-1416-48b2-bd66-984b5a90ee2b" containerName="dnsmasq-dns" Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.897195 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="93836cb5-1416-48b2-bd66-984b5a90ee2b" containerName="dnsmasq-dns" Nov 24 01:29:24 crc kubenswrapper[4755]: E1124 01:29:24.897209 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93836cb5-1416-48b2-bd66-984b5a90ee2b" containerName="init" Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.897215 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="93836cb5-1416-48b2-bd66-984b5a90ee2b" containerName="init" Nov 24 01:29:24 crc kubenswrapper[4755]: E1124 01:29:24.897234 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aea62103-9b85-495d-bb71-3c69c02a3000" containerName="cinder-db-sync" Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.897240 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="aea62103-9b85-495d-bb71-3c69c02a3000" containerName="cinder-db-sync" Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.897398 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="aea62103-9b85-495d-bb71-3c69c02a3000" containerName="cinder-db-sync" Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.897424 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="93836cb5-1416-48b2-bd66-984b5a90ee2b" containerName="dnsmasq-dns" Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.898333 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.903019 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-4jcwk" Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.903236 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.903336 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.903762 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.935685 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.964498 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-9bcwp"] Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.966885 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-9bcwp" Nov 24 01:29:24 crc kubenswrapper[4755]: I1124 01:29:24.972378 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-9bcwp"] Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.038846 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da14d080-1117-42b6-b8d3-dfb22ef83267-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"da14d080-1117-42b6-b8d3-dfb22ef83267\") " pod="openstack/cinder-scheduler-0" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.038900 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxtdj\" (UniqueName: \"kubernetes.io/projected/da14d080-1117-42b6-b8d3-dfb22ef83267-kube-api-access-kxtdj\") pod \"cinder-scheduler-0\" (UID: \"da14d080-1117-42b6-b8d3-dfb22ef83267\") " pod="openstack/cinder-scheduler-0" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.038931 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/da14d080-1117-42b6-b8d3-dfb22ef83267-config-data\") pod \"cinder-scheduler-0\" (UID: \"da14d080-1117-42b6-b8d3-dfb22ef83267\") " pod="openstack/cinder-scheduler-0" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.038949 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/da14d080-1117-42b6-b8d3-dfb22ef83267-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"da14d080-1117-42b6-b8d3-dfb22ef83267\") " pod="openstack/cinder-scheduler-0" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.038977 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/489c124a-e8f4-47a6-bc2e-e3aa9d450909-config\") pod \"dnsmasq-dns-5784cf869f-9bcwp\" (UID: \"489c124a-e8f4-47a6-bc2e-e3aa9d450909\") " pod="openstack/dnsmasq-dns-5784cf869f-9bcwp" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.039004 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/489c124a-e8f4-47a6-bc2e-e3aa9d450909-ovsdbserver-sb\") pod \"dnsmasq-dns-5784cf869f-9bcwp\" (UID: \"489c124a-e8f4-47a6-bc2e-e3aa9d450909\") " pod="openstack/dnsmasq-dns-5784cf869f-9bcwp" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.039027 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/da14d080-1117-42b6-b8d3-dfb22ef83267-scripts\") pod \"cinder-scheduler-0\" (UID: \"da14d080-1117-42b6-b8d3-dfb22ef83267\") " pod="openstack/cinder-scheduler-0" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.039074 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/489c124a-e8f4-47a6-bc2e-e3aa9d450909-dns-swift-storage-0\") pod \"dnsmasq-dns-5784cf869f-9bcwp\" (UID: \"489c124a-e8f4-47a6-bc2e-e3aa9d450909\") " pod="openstack/dnsmasq-dns-5784cf869f-9bcwp" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.039104 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/489c124a-e8f4-47a6-bc2e-e3aa9d450909-dns-svc\") pod \"dnsmasq-dns-5784cf869f-9bcwp\" (UID: \"489c124a-e8f4-47a6-bc2e-e3aa9d450909\") " pod="openstack/dnsmasq-dns-5784cf869f-9bcwp" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.039138 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4d4v9\" (UniqueName: \"kubernetes.io/projected/489c124a-e8f4-47a6-bc2e-e3aa9d450909-kube-api-access-4d4v9\") pod \"dnsmasq-dns-5784cf869f-9bcwp\" (UID: \"489c124a-e8f4-47a6-bc2e-e3aa9d450909\") " pod="openstack/dnsmasq-dns-5784cf869f-9bcwp" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.039184 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/489c124a-e8f4-47a6-bc2e-e3aa9d450909-ovsdbserver-nb\") pod \"dnsmasq-dns-5784cf869f-9bcwp\" (UID: \"489c124a-e8f4-47a6-bc2e-e3aa9d450909\") " pod="openstack/dnsmasq-dns-5784cf869f-9bcwp" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.039244 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/da14d080-1117-42b6-b8d3-dfb22ef83267-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"da14d080-1117-42b6-b8d3-dfb22ef83267\") " pod="openstack/cinder-scheduler-0" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.039370 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.040865 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.045934 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.050090 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.141282 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/da14d080-1117-42b6-b8d3-dfb22ef83267-scripts\") pod \"cinder-scheduler-0\" (UID: \"da14d080-1117-42b6-b8d3-dfb22ef83267\") " pod="openstack/cinder-scheduler-0" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.141394 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1e20cc1-51ab-439a-ac82-00412ac8a862-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"f1e20cc1-51ab-439a-ac82-00412ac8a862\") " pod="openstack/cinder-api-0" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.141435 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f1e20cc1-51ab-439a-ac82-00412ac8a862-config-data-custom\") pod \"cinder-api-0\" (UID: \"f1e20cc1-51ab-439a-ac82-00412ac8a862\") " pod="openstack/cinder-api-0" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.141466 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/489c124a-e8f4-47a6-bc2e-e3aa9d450909-dns-swift-storage-0\") pod \"dnsmasq-dns-5784cf869f-9bcwp\" (UID: \"489c124a-e8f4-47a6-bc2e-e3aa9d450909\") " pod="openstack/dnsmasq-dns-5784cf869f-9bcwp" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.141488 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/489c124a-e8f4-47a6-bc2e-e3aa9d450909-dns-svc\") pod \"dnsmasq-dns-5784cf869f-9bcwp\" (UID: \"489c124a-e8f4-47a6-bc2e-e3aa9d450909\") " pod="openstack/dnsmasq-dns-5784cf869f-9bcwp" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.141504 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f1e20cc1-51ab-439a-ac82-00412ac8a862-scripts\") pod \"cinder-api-0\" (UID: \"f1e20cc1-51ab-439a-ac82-00412ac8a862\") " pod="openstack/cinder-api-0" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.141521 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f1e20cc1-51ab-439a-ac82-00412ac8a862-etc-machine-id\") pod \"cinder-api-0\" (UID: \"f1e20cc1-51ab-439a-ac82-00412ac8a862\") " pod="openstack/cinder-api-0" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.141538 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4d4v9\" (UniqueName: \"kubernetes.io/projected/489c124a-e8f4-47a6-bc2e-e3aa9d450909-kube-api-access-4d4v9\") pod \"dnsmasq-dns-5784cf869f-9bcwp\" (UID: \"489c124a-e8f4-47a6-bc2e-e3aa9d450909\") " pod="openstack/dnsmasq-dns-5784cf869f-9bcwp" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.141555 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1e20cc1-51ab-439a-ac82-00412ac8a862-config-data\") pod \"cinder-api-0\" (UID: \"f1e20cc1-51ab-439a-ac82-00412ac8a862\") " pod="openstack/cinder-api-0" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.141614 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/489c124a-e8f4-47a6-bc2e-e3aa9d450909-ovsdbserver-nb\") pod \"dnsmasq-dns-5784cf869f-9bcwp\" (UID: \"489c124a-e8f4-47a6-bc2e-e3aa9d450909\") " pod="openstack/dnsmasq-dns-5784cf869f-9bcwp" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.142901 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/489c124a-e8f4-47a6-bc2e-e3aa9d450909-dns-swift-storage-0\") pod \"dnsmasq-dns-5784cf869f-9bcwp\" (UID: \"489c124a-e8f4-47a6-bc2e-e3aa9d450909\") " pod="openstack/dnsmasq-dns-5784cf869f-9bcwp" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.143469 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/489c124a-e8f4-47a6-bc2e-e3aa9d450909-dns-svc\") pod \"dnsmasq-dns-5784cf869f-9bcwp\" (UID: \"489c124a-e8f4-47a6-bc2e-e3aa9d450909\") " pod="openstack/dnsmasq-dns-5784cf869f-9bcwp" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.143665 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/489c124a-e8f4-47a6-bc2e-e3aa9d450909-ovsdbserver-nb\") pod \"dnsmasq-dns-5784cf869f-9bcwp\" (UID: \"489c124a-e8f4-47a6-bc2e-e3aa9d450909\") " pod="openstack/dnsmasq-dns-5784cf869f-9bcwp" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.143892 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/da14d080-1117-42b6-b8d3-dfb22ef83267-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"da14d080-1117-42b6-b8d3-dfb22ef83267\") " pod="openstack/cinder-scheduler-0" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.143926 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f1e20cc1-51ab-439a-ac82-00412ac8a862-logs\") pod \"cinder-api-0\" (UID: \"f1e20cc1-51ab-439a-ac82-00412ac8a862\") " pod="openstack/cinder-api-0" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.143956 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vnz8v\" (UniqueName: \"kubernetes.io/projected/f1e20cc1-51ab-439a-ac82-00412ac8a862-kube-api-access-vnz8v\") pod \"cinder-api-0\" (UID: \"f1e20cc1-51ab-439a-ac82-00412ac8a862\") " pod="openstack/cinder-api-0" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.143975 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da14d080-1117-42b6-b8d3-dfb22ef83267-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"da14d080-1117-42b6-b8d3-dfb22ef83267\") " pod="openstack/cinder-scheduler-0" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.143992 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kxtdj\" (UniqueName: \"kubernetes.io/projected/da14d080-1117-42b6-b8d3-dfb22ef83267-kube-api-access-kxtdj\") pod \"cinder-scheduler-0\" (UID: 
\"da14d080-1117-42b6-b8d3-dfb22ef83267\") " pod="openstack/cinder-scheduler-0" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.144021 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/da14d080-1117-42b6-b8d3-dfb22ef83267-config-data\") pod \"cinder-scheduler-0\" (UID: \"da14d080-1117-42b6-b8d3-dfb22ef83267\") " pod="openstack/cinder-scheduler-0" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.144049 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/da14d080-1117-42b6-b8d3-dfb22ef83267-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"da14d080-1117-42b6-b8d3-dfb22ef83267\") " pod="openstack/cinder-scheduler-0" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.144099 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/489c124a-e8f4-47a6-bc2e-e3aa9d450909-config\") pod \"dnsmasq-dns-5784cf869f-9bcwp\" (UID: \"489c124a-e8f4-47a6-bc2e-e3aa9d450909\") " pod="openstack/dnsmasq-dns-5784cf869f-9bcwp" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.144135 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/489c124a-e8f4-47a6-bc2e-e3aa9d450909-ovsdbserver-sb\") pod \"dnsmasq-dns-5784cf869f-9bcwp\" (UID: \"489c124a-e8f4-47a6-bc2e-e3aa9d450909\") " pod="openstack/dnsmasq-dns-5784cf869f-9bcwp" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.144690 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/489c124a-e8f4-47a6-bc2e-e3aa9d450909-ovsdbserver-sb\") pod \"dnsmasq-dns-5784cf869f-9bcwp\" (UID: \"489c124a-e8f4-47a6-bc2e-e3aa9d450909\") " pod="openstack/dnsmasq-dns-5784cf869f-9bcwp" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.146239 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/da14d080-1117-42b6-b8d3-dfb22ef83267-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"da14d080-1117-42b6-b8d3-dfb22ef83267\") " pod="openstack/cinder-scheduler-0" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.146996 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/da14d080-1117-42b6-b8d3-dfb22ef83267-scripts\") pod \"cinder-scheduler-0\" (UID: \"da14d080-1117-42b6-b8d3-dfb22ef83267\") " pod="openstack/cinder-scheduler-0" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.152302 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da14d080-1117-42b6-b8d3-dfb22ef83267-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"da14d080-1117-42b6-b8d3-dfb22ef83267\") " pod="openstack/cinder-scheduler-0" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.156389 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/489c124a-e8f4-47a6-bc2e-e3aa9d450909-config\") pod \"dnsmasq-dns-5784cf869f-9bcwp\" (UID: \"489c124a-e8f4-47a6-bc2e-e3aa9d450909\") " pod="openstack/dnsmasq-dns-5784cf869f-9bcwp" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.156822 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" 
(UniqueName: \"kubernetes.io/secret/da14d080-1117-42b6-b8d3-dfb22ef83267-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"da14d080-1117-42b6-b8d3-dfb22ef83267\") " pod="openstack/cinder-scheduler-0" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.158969 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/da14d080-1117-42b6-b8d3-dfb22ef83267-config-data\") pod \"cinder-scheduler-0\" (UID: \"da14d080-1117-42b6-b8d3-dfb22ef83267\") " pod="openstack/cinder-scheduler-0" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.159538 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4d4v9\" (UniqueName: \"kubernetes.io/projected/489c124a-e8f4-47a6-bc2e-e3aa9d450909-kube-api-access-4d4v9\") pod \"dnsmasq-dns-5784cf869f-9bcwp\" (UID: \"489c124a-e8f4-47a6-bc2e-e3aa9d450909\") " pod="openstack/dnsmasq-dns-5784cf869f-9bcwp" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.165983 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kxtdj\" (UniqueName: \"kubernetes.io/projected/da14d080-1117-42b6-b8d3-dfb22ef83267-kube-api-access-kxtdj\") pod \"cinder-scheduler-0\" (UID: \"da14d080-1117-42b6-b8d3-dfb22ef83267\") " pod="openstack/cinder-scheduler-0" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.220453 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.246022 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f1e20cc1-51ab-439a-ac82-00412ac8a862-logs\") pod \"cinder-api-0\" (UID: \"f1e20cc1-51ab-439a-ac82-00412ac8a862\") " pod="openstack/cinder-api-0" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.246087 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vnz8v\" (UniqueName: \"kubernetes.io/projected/f1e20cc1-51ab-439a-ac82-00412ac8a862-kube-api-access-vnz8v\") pod \"cinder-api-0\" (UID: \"f1e20cc1-51ab-439a-ac82-00412ac8a862\") " pod="openstack/cinder-api-0" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.246201 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1e20cc1-51ab-439a-ac82-00412ac8a862-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"f1e20cc1-51ab-439a-ac82-00412ac8a862\") " pod="openstack/cinder-api-0" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.246247 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f1e20cc1-51ab-439a-ac82-00412ac8a862-config-data-custom\") pod \"cinder-api-0\" (UID: \"f1e20cc1-51ab-439a-ac82-00412ac8a862\") " pod="openstack/cinder-api-0" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.246285 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f1e20cc1-51ab-439a-ac82-00412ac8a862-scripts\") pod \"cinder-api-0\" (UID: \"f1e20cc1-51ab-439a-ac82-00412ac8a862\") " pod="openstack/cinder-api-0" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.246322 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f1e20cc1-51ab-439a-ac82-00412ac8a862-etc-machine-id\") pod \"cinder-api-0\" 
(UID: \"f1e20cc1-51ab-439a-ac82-00412ac8a862\") " pod="openstack/cinder-api-0" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.246352 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1e20cc1-51ab-439a-ac82-00412ac8a862-config-data\") pod \"cinder-api-0\" (UID: \"f1e20cc1-51ab-439a-ac82-00412ac8a862\") " pod="openstack/cinder-api-0" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.246549 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f1e20cc1-51ab-439a-ac82-00412ac8a862-logs\") pod \"cinder-api-0\" (UID: \"f1e20cc1-51ab-439a-ac82-00412ac8a862\") " pod="openstack/cinder-api-0" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.247764 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f1e20cc1-51ab-439a-ac82-00412ac8a862-etc-machine-id\") pod \"cinder-api-0\" (UID: \"f1e20cc1-51ab-439a-ac82-00412ac8a862\") " pod="openstack/cinder-api-0" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.251682 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1e20cc1-51ab-439a-ac82-00412ac8a862-config-data\") pod \"cinder-api-0\" (UID: \"f1e20cc1-51ab-439a-ac82-00412ac8a862\") " pod="openstack/cinder-api-0" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.255172 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f1e20cc1-51ab-439a-ac82-00412ac8a862-scripts\") pod \"cinder-api-0\" (UID: \"f1e20cc1-51ab-439a-ac82-00412ac8a862\") " pod="openstack/cinder-api-0" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.255463 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f1e20cc1-51ab-439a-ac82-00412ac8a862-config-data-custom\") pod \"cinder-api-0\" (UID: \"f1e20cc1-51ab-439a-ac82-00412ac8a862\") " pod="openstack/cinder-api-0" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.256174 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1e20cc1-51ab-439a-ac82-00412ac8a862-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"f1e20cc1-51ab-439a-ac82-00412ac8a862\") " pod="openstack/cinder-api-0" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.270377 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vnz8v\" (UniqueName: \"kubernetes.io/projected/f1e20cc1-51ab-439a-ac82-00412ac8a862-kube-api-access-vnz8v\") pod \"cinder-api-0\" (UID: \"f1e20cc1-51ab-439a-ac82-00412ac8a862\") " pod="openstack/cinder-api-0" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.286163 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-9bcwp" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.371819 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.710445 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Nov 24 01:29:25 crc kubenswrapper[4755]: I1124 01:29:25.716931 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 24 01:29:26 crc kubenswrapper[4755]: I1124 01:29:26.045337 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="93836cb5-1416-48b2-bd66-984b5a90ee2b" path="/var/lib/kubelet/pods/93836cb5-1416-48b2-bd66-984b5a90ee2b/volumes" Nov 24 01:29:26 crc kubenswrapper[4755]: I1124 01:29:26.066758 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-9bcwp"] Nov 24 01:29:26 crc kubenswrapper[4755]: I1124 01:29:26.529565 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f1e20cc1-51ab-439a-ac82-00412ac8a862","Type":"ContainerStarted","Data":"63d5d17500ab1e78c20e3945c1f85e44843f2c6c8dac26bc1ce64f6e748d370d"} Nov 24 01:29:26 crc kubenswrapper[4755]: I1124 01:29:26.536076 4755 generic.go:334] "Generic (PLEG): container finished" podID="a7e96f37-574f-4900-88f9-33dc41179807" containerID="f9e6e8f3d6be6e25c707568e1e71cb08d1ef2b9bc56285251a356b84cfbe3a19" exitCode=0 Nov 24 01:29:26 crc kubenswrapper[4755]: I1124 01:29:26.536139 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7e96f37-574f-4900-88f9-33dc41179807","Type":"ContainerDied","Data":"f9e6e8f3d6be6e25c707568e1e71cb08d1ef2b9bc56285251a356b84cfbe3a19"} Nov 24 01:29:26 crc kubenswrapper[4755]: I1124 01:29:26.539410 4755 generic.go:334] "Generic (PLEG): container finished" podID="489c124a-e8f4-47a6-bc2e-e3aa9d450909" containerID="15e0514991e2549e463a9cb95908acb4ec29d5a3d4a3ae1c25510dfeefab7bfb" exitCode=0 Nov 24 01:29:26 crc kubenswrapper[4755]: I1124 01:29:26.539453 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-9bcwp" event={"ID":"489c124a-e8f4-47a6-bc2e-e3aa9d450909","Type":"ContainerDied","Data":"15e0514991e2549e463a9cb95908acb4ec29d5a3d4a3ae1c25510dfeefab7bfb"} Nov 24 01:29:26 crc kubenswrapper[4755]: I1124 01:29:26.539471 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-9bcwp" event={"ID":"489c124a-e8f4-47a6-bc2e-e3aa9d450909","Type":"ContainerStarted","Data":"9290506d75f259b1b020bca05b56ae57c9058964212836056809ba4372caff99"} Nov 24 01:29:26 crc kubenswrapper[4755]: I1124 01:29:26.542265 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"da14d080-1117-42b6-b8d3-dfb22ef83267","Type":"ContainerStarted","Data":"c78a0aa0642445b7f9a1e40a06edd8792643adf103cbd1cee40f8d5dd5e51c33"} Nov 24 01:29:27 crc kubenswrapper[4755]: I1124 01:29:27.199095 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-64586f69c8-7phjt" Nov 24 01:29:27 crc kubenswrapper[4755]: I1124 01:29:27.241658 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Nov 24 01:29:27 crc kubenswrapper[4755]: I1124 01:29:27.291454 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-64586f69c8-7phjt" Nov 24 01:29:27 crc kubenswrapper[4755]: I1124 01:29:27.367354 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-74db9f6546-bvfjf"] Nov 24 01:29:27 crc kubenswrapper[4755]: I1124 01:29:27.367586 
4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-74db9f6546-bvfjf" podUID="3431a799-d534-4932-8dad-1d2e49a74737" containerName="barbican-api-log" containerID="cri-o://90d95af224e73d08f34b63b67fcaaff30b00be6e3dc0ac0c80c995f916d60e3c" gracePeriod=30 Nov 24 01:29:27 crc kubenswrapper[4755]: I1124 01:29:27.367726 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-74db9f6546-bvfjf" podUID="3431a799-d534-4932-8dad-1d2e49a74737" containerName="barbican-api" containerID="cri-o://aeaf273448fbf66b722ad8b0293f2b8c3db5a12c2ebcf4b4fb4b1cc41b2e2376" gracePeriod=30 Nov 24 01:29:27 crc kubenswrapper[4755]: I1124 01:29:27.575097 4755 generic.go:334] "Generic (PLEG): container finished" podID="3431a799-d534-4932-8dad-1d2e49a74737" containerID="90d95af224e73d08f34b63b67fcaaff30b00be6e3dc0ac0c80c995f916d60e3c" exitCode=143 Nov 24 01:29:27 crc kubenswrapper[4755]: I1124 01:29:27.575223 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-74db9f6546-bvfjf" event={"ID":"3431a799-d534-4932-8dad-1d2e49a74737","Type":"ContainerDied","Data":"90d95af224e73d08f34b63b67fcaaff30b00be6e3dc0ac0c80c995f916d60e3c"} Nov 24 01:29:27 crc kubenswrapper[4755]: I1124 01:29:27.584741 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-9bcwp" event={"ID":"489c124a-e8f4-47a6-bc2e-e3aa9d450909","Type":"ContainerStarted","Data":"872183f3c563b5cf0f1183643539050a19c18accd5eafe8643540fe77307c6a1"} Nov 24 01:29:27 crc kubenswrapper[4755]: I1124 01:29:27.585066 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5784cf869f-9bcwp" Nov 24 01:29:27 crc kubenswrapper[4755]: I1124 01:29:27.592636 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"da14d080-1117-42b6-b8d3-dfb22ef83267","Type":"ContainerStarted","Data":"24859f1d8e28b44d051332ce0047b14bde88b3f637227af992fa9be9432c362a"} Nov 24 01:29:27 crc kubenswrapper[4755]: I1124 01:29:27.613823 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5784cf869f-9bcwp" podStartSLOduration=3.613804565 podStartE2EDuration="3.613804565s" podCreationTimestamp="2025-11-24 01:29:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:29:27.606698399 +0000 UTC m=+992.292763920" watchObservedRunningTime="2025-11-24 01:29:27.613804565 +0000 UTC m=+992.299870066" Nov 24 01:29:27 crc kubenswrapper[4755]: I1124 01:29:27.619411 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f1e20cc1-51ab-439a-ac82-00412ac8a862","Type":"ContainerStarted","Data":"53423f179d9cc8793d19e107e7105d276ba153ab9fd03c512218a31b634717fd"} Nov 24 01:29:28 crc kubenswrapper[4755]: I1124 01:29:28.682271 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"da14d080-1117-42b6-b8d3-dfb22ef83267","Type":"ContainerStarted","Data":"769c88939eb37c7a60ba2e21fae48dac3c92d89f49d03b30bb0a32b946de6c91"} Nov 24 01:29:28 crc kubenswrapper[4755]: I1124 01:29:28.731776 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="f1e20cc1-51ab-439a-ac82-00412ac8a862" containerName="cinder-api-log" containerID="cri-o://53423f179d9cc8793d19e107e7105d276ba153ab9fd03c512218a31b634717fd" gracePeriod=30 Nov 
24 01:29:28 crc kubenswrapper[4755]: I1124 01:29:28.732083 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f1e20cc1-51ab-439a-ac82-00412ac8a862","Type":"ContainerStarted","Data":"e472d453743de4ee8c000782010cfeb3867eb969d5468d4fe3b8b42ef52adf1a"} Nov 24 01:29:28 crc kubenswrapper[4755]: I1124 01:29:28.732129 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Nov 24 01:29:28 crc kubenswrapper[4755]: I1124 01:29:28.732155 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="f1e20cc1-51ab-439a-ac82-00412ac8a862" containerName="cinder-api" containerID="cri-o://e472d453743de4ee8c000782010cfeb3867eb969d5468d4fe3b8b42ef52adf1a" gracePeriod=30 Nov 24 01:29:28 crc kubenswrapper[4755]: I1124 01:29:28.745446 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.935542852 podStartE2EDuration="4.745427638s" podCreationTimestamp="2025-11-24 01:29:24 +0000 UTC" firstStartedPulling="2025-11-24 01:29:25.723197541 +0000 UTC m=+990.409263042" lastFinishedPulling="2025-11-24 01:29:26.533082327 +0000 UTC m=+991.219147828" observedRunningTime="2025-11-24 01:29:28.743257898 +0000 UTC m=+993.429323439" watchObservedRunningTime="2025-11-24 01:29:28.745427638 +0000 UTC m=+993.431493139" Nov 24 01:29:28 crc kubenswrapper[4755]: I1124 01:29:28.809561 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.809539387 podStartE2EDuration="3.809539387s" podCreationTimestamp="2025-11-24 01:29:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:29:28.787274312 +0000 UTC m=+993.473339833" watchObservedRunningTime="2025-11-24 01:29:28.809539387 +0000 UTC m=+993.495604888" Nov 24 01:29:29 crc kubenswrapper[4755]: I1124 01:29:29.663578 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Nov 24 01:29:29 crc kubenswrapper[4755]: I1124 01:29:29.742480 4755 generic.go:334] "Generic (PLEG): container finished" podID="f1e20cc1-51ab-439a-ac82-00412ac8a862" containerID="e472d453743de4ee8c000782010cfeb3867eb969d5468d4fe3b8b42ef52adf1a" exitCode=0 Nov 24 01:29:29 crc kubenswrapper[4755]: I1124 01:29:29.742511 4755 generic.go:334] "Generic (PLEG): container finished" podID="f1e20cc1-51ab-439a-ac82-00412ac8a862" containerID="53423f179d9cc8793d19e107e7105d276ba153ab9fd03c512218a31b634717fd" exitCode=143 Nov 24 01:29:29 crc kubenswrapper[4755]: I1124 01:29:29.742653 4755 util.go:48] "No ready sandbox for pod can be found. 
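The pod_startup_latency_tracker records just above carry both podStartSLOduration and podStartE2EDuration, plus the image-pull window as firstStartedPulling/lastFinishedPulling with a monotonic m=+ offset whenever a pull actually happened. A short sketch for summarising those fields, assuming the exact key names shown here; startup_report is a hypothetical helper, not part of the kubelet.

    import re

    STARTUP = re.compile(
        r'"Observed pod startup duration" pod="(?P<pod>[^"]+)"'
        r' podStartSLOduration=(?P<slo>[0-9.]+)'
        r' podStartE2EDuration="(?P<e2e>[^"]+)"'
    )
    # The monotonic clock offsets (the m=+... suffix) are the simplest way to
    # get a pull window without parsing the full wall-clock timestamps.
    PULL = re.compile(
        r'(?P<field>firstStartedPulling|lastFinishedPulling)="[^"]*m=\+(?P<off>[0-9.]+)'
    )

    def startup_report(log_lines):
        """Yield (pod, slo_seconds, e2e_duration, pull_seconds or None)."""
        for line in log_lines:
            m = STARTUP.search(line)
            if not m:
                continue
            offs = {p.group("field"): float(p.group("off"))
                    for p in PULL.finditer(line)}
            pull = None
            if "firstStartedPulling" in offs and "lastFinishedPulling" in offs:
                pull = offs["lastFinishedPulling"] - offs["firstStartedPulling"]
            yield m.group("pod"), float(m.group("slo")), m.group("e2e"), pull

For openstack/cinder-scheduler-0 this gives a pull window of roughly 0.81 s (991.219147828 minus 990.409263042), which matches the gap between the 3.94 s SLO duration and the 4.75 s end-to-end duration; the dnsmasq-dns and cinder-api entries report the zero time for both pull fields, so no pull window is counted for them.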
Need to start a new one" pod="openstack/cinder-api-0" Nov 24 01:29:29 crc kubenswrapper[4755]: I1124 01:29:29.743329 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f1e20cc1-51ab-439a-ac82-00412ac8a862","Type":"ContainerDied","Data":"e472d453743de4ee8c000782010cfeb3867eb969d5468d4fe3b8b42ef52adf1a"} Nov 24 01:29:29 crc kubenswrapper[4755]: I1124 01:29:29.743391 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f1e20cc1-51ab-439a-ac82-00412ac8a862","Type":"ContainerDied","Data":"53423f179d9cc8793d19e107e7105d276ba153ab9fd03c512218a31b634717fd"} Nov 24 01:29:29 crc kubenswrapper[4755]: I1124 01:29:29.743408 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f1e20cc1-51ab-439a-ac82-00412ac8a862","Type":"ContainerDied","Data":"63d5d17500ab1e78c20e3945c1f85e44843f2c6c8dac26bc1ce64f6e748d370d"} Nov 24 01:29:29 crc kubenswrapper[4755]: I1124 01:29:29.743430 4755 scope.go:117] "RemoveContainer" containerID="e472d453743de4ee8c000782010cfeb3867eb969d5468d4fe3b8b42ef52adf1a" Nov 24 01:29:29 crc kubenswrapper[4755]: I1124 01:29:29.754803 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f1e20cc1-51ab-439a-ac82-00412ac8a862-config-data-custom\") pod \"f1e20cc1-51ab-439a-ac82-00412ac8a862\" (UID: \"f1e20cc1-51ab-439a-ac82-00412ac8a862\") " Nov 24 01:29:29 crc kubenswrapper[4755]: I1124 01:29:29.754859 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vnz8v\" (UniqueName: \"kubernetes.io/projected/f1e20cc1-51ab-439a-ac82-00412ac8a862-kube-api-access-vnz8v\") pod \"f1e20cc1-51ab-439a-ac82-00412ac8a862\" (UID: \"f1e20cc1-51ab-439a-ac82-00412ac8a862\") " Nov 24 01:29:29 crc kubenswrapper[4755]: I1124 01:29:29.754901 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f1e20cc1-51ab-439a-ac82-00412ac8a862-etc-machine-id\") pod \"f1e20cc1-51ab-439a-ac82-00412ac8a862\" (UID: \"f1e20cc1-51ab-439a-ac82-00412ac8a862\") " Nov 24 01:29:29 crc kubenswrapper[4755]: I1124 01:29:29.754943 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f1e20cc1-51ab-439a-ac82-00412ac8a862-logs\") pod \"f1e20cc1-51ab-439a-ac82-00412ac8a862\" (UID: \"f1e20cc1-51ab-439a-ac82-00412ac8a862\") " Nov 24 01:29:29 crc kubenswrapper[4755]: I1124 01:29:29.754979 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f1e20cc1-51ab-439a-ac82-00412ac8a862-scripts\") pod \"f1e20cc1-51ab-439a-ac82-00412ac8a862\" (UID: \"f1e20cc1-51ab-439a-ac82-00412ac8a862\") " Nov 24 01:29:29 crc kubenswrapper[4755]: I1124 01:29:29.755075 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1e20cc1-51ab-439a-ac82-00412ac8a862-combined-ca-bundle\") pod \"f1e20cc1-51ab-439a-ac82-00412ac8a862\" (UID: \"f1e20cc1-51ab-439a-ac82-00412ac8a862\") " Nov 24 01:29:29 crc kubenswrapper[4755]: I1124 01:29:29.755103 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1e20cc1-51ab-439a-ac82-00412ac8a862-config-data\") pod \"f1e20cc1-51ab-439a-ac82-00412ac8a862\" (UID: 
\"f1e20cc1-51ab-439a-ac82-00412ac8a862\") " Nov 24 01:29:29 crc kubenswrapper[4755]: I1124 01:29:29.755741 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f1e20cc1-51ab-439a-ac82-00412ac8a862-logs" (OuterVolumeSpecName: "logs") pod "f1e20cc1-51ab-439a-ac82-00412ac8a862" (UID: "f1e20cc1-51ab-439a-ac82-00412ac8a862"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:29:29 crc kubenswrapper[4755]: I1124 01:29:29.755801 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f1e20cc1-51ab-439a-ac82-00412ac8a862-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "f1e20cc1-51ab-439a-ac82-00412ac8a862" (UID: "f1e20cc1-51ab-439a-ac82-00412ac8a862"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 01:29:29 crc kubenswrapper[4755]: I1124 01:29:29.762584 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1e20cc1-51ab-439a-ac82-00412ac8a862-kube-api-access-vnz8v" (OuterVolumeSpecName: "kube-api-access-vnz8v") pod "f1e20cc1-51ab-439a-ac82-00412ac8a862" (UID: "f1e20cc1-51ab-439a-ac82-00412ac8a862"). InnerVolumeSpecName "kube-api-access-vnz8v". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:29:29 crc kubenswrapper[4755]: I1124 01:29:29.762969 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1e20cc1-51ab-439a-ac82-00412ac8a862-scripts" (OuterVolumeSpecName: "scripts") pod "f1e20cc1-51ab-439a-ac82-00412ac8a862" (UID: "f1e20cc1-51ab-439a-ac82-00412ac8a862"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:29:29 crc kubenswrapper[4755]: I1124 01:29:29.771153 4755 scope.go:117] "RemoveContainer" containerID="53423f179d9cc8793d19e107e7105d276ba153ab9fd03c512218a31b634717fd" Nov 24 01:29:29 crc kubenswrapper[4755]: I1124 01:29:29.772658 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1e20cc1-51ab-439a-ac82-00412ac8a862-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "f1e20cc1-51ab-439a-ac82-00412ac8a862" (UID: "f1e20cc1-51ab-439a-ac82-00412ac8a862"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:29:29 crc kubenswrapper[4755]: I1124 01:29:29.791030 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1e20cc1-51ab-439a-ac82-00412ac8a862-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f1e20cc1-51ab-439a-ac82-00412ac8a862" (UID: "f1e20cc1-51ab-439a-ac82-00412ac8a862"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:29:29 crc kubenswrapper[4755]: I1124 01:29:29.809801 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1e20cc1-51ab-439a-ac82-00412ac8a862-config-data" (OuterVolumeSpecName: "config-data") pod "f1e20cc1-51ab-439a-ac82-00412ac8a862" (UID: "f1e20cc1-51ab-439a-ac82-00412ac8a862"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:29:29 crc kubenswrapper[4755]: I1124 01:29:29.833966 4755 scope.go:117] "RemoveContainer" containerID="e472d453743de4ee8c000782010cfeb3867eb969d5468d4fe3b8b42ef52adf1a" Nov 24 01:29:29 crc kubenswrapper[4755]: E1124 01:29:29.834581 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e472d453743de4ee8c000782010cfeb3867eb969d5468d4fe3b8b42ef52adf1a\": container with ID starting with e472d453743de4ee8c000782010cfeb3867eb969d5468d4fe3b8b42ef52adf1a not found: ID does not exist" containerID="e472d453743de4ee8c000782010cfeb3867eb969d5468d4fe3b8b42ef52adf1a" Nov 24 01:29:29 crc kubenswrapper[4755]: I1124 01:29:29.834674 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e472d453743de4ee8c000782010cfeb3867eb969d5468d4fe3b8b42ef52adf1a"} err="failed to get container status \"e472d453743de4ee8c000782010cfeb3867eb969d5468d4fe3b8b42ef52adf1a\": rpc error: code = NotFound desc = could not find container \"e472d453743de4ee8c000782010cfeb3867eb969d5468d4fe3b8b42ef52adf1a\": container with ID starting with e472d453743de4ee8c000782010cfeb3867eb969d5468d4fe3b8b42ef52adf1a not found: ID does not exist" Nov 24 01:29:29 crc kubenswrapper[4755]: I1124 01:29:29.834700 4755 scope.go:117] "RemoveContainer" containerID="53423f179d9cc8793d19e107e7105d276ba153ab9fd03c512218a31b634717fd" Nov 24 01:29:29 crc kubenswrapper[4755]: E1124 01:29:29.835125 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"53423f179d9cc8793d19e107e7105d276ba153ab9fd03c512218a31b634717fd\": container with ID starting with 53423f179d9cc8793d19e107e7105d276ba153ab9fd03c512218a31b634717fd not found: ID does not exist" containerID="53423f179d9cc8793d19e107e7105d276ba153ab9fd03c512218a31b634717fd" Nov 24 01:29:29 crc kubenswrapper[4755]: I1124 01:29:29.835158 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53423f179d9cc8793d19e107e7105d276ba153ab9fd03c512218a31b634717fd"} err="failed to get container status \"53423f179d9cc8793d19e107e7105d276ba153ab9fd03c512218a31b634717fd\": rpc error: code = NotFound desc = could not find container \"53423f179d9cc8793d19e107e7105d276ba153ab9fd03c512218a31b634717fd\": container with ID starting with 53423f179d9cc8793d19e107e7105d276ba153ab9fd03c512218a31b634717fd not found: ID does not exist" Nov 24 01:29:29 crc kubenswrapper[4755]: I1124 01:29:29.835184 4755 scope.go:117] "RemoveContainer" containerID="e472d453743de4ee8c000782010cfeb3867eb969d5468d4fe3b8b42ef52adf1a" Nov 24 01:29:29 crc kubenswrapper[4755]: I1124 01:29:29.835519 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e472d453743de4ee8c000782010cfeb3867eb969d5468d4fe3b8b42ef52adf1a"} err="failed to get container status \"e472d453743de4ee8c000782010cfeb3867eb969d5468d4fe3b8b42ef52adf1a\": rpc error: code = NotFound desc = could not find container \"e472d453743de4ee8c000782010cfeb3867eb969d5468d4fe3b8b42ef52adf1a\": container with ID starting with e472d453743de4ee8c000782010cfeb3867eb969d5468d4fe3b8b42ef52adf1a not found: ID does not exist" Nov 24 01:29:29 crc kubenswrapper[4755]: I1124 01:29:29.835543 4755 scope.go:117] "RemoveContainer" containerID="53423f179d9cc8793d19e107e7105d276ba153ab9fd03c512218a31b634717fd" Nov 24 01:29:29 crc kubenswrapper[4755]: I1124 01:29:29.835858 4755 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53423f179d9cc8793d19e107e7105d276ba153ab9fd03c512218a31b634717fd"} err="failed to get container status \"53423f179d9cc8793d19e107e7105d276ba153ab9fd03c512218a31b634717fd\": rpc error: code = NotFound desc = could not find container \"53423f179d9cc8793d19e107e7105d276ba153ab9fd03c512218a31b634717fd\": container with ID starting with 53423f179d9cc8793d19e107e7105d276ba153ab9fd03c512218a31b634717fd not found: ID does not exist" Nov 24 01:29:29 crc kubenswrapper[4755]: I1124 01:29:29.855082 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-55bcc7b6cd-xzjdz" Nov 24 01:29:29 crc kubenswrapper[4755]: I1124 01:29:29.856932 4755 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f1e20cc1-51ab-439a-ac82-00412ac8a862-config-data-custom\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:29 crc kubenswrapper[4755]: I1124 01:29:29.856961 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vnz8v\" (UniqueName: \"kubernetes.io/projected/f1e20cc1-51ab-439a-ac82-00412ac8a862-kube-api-access-vnz8v\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:29 crc kubenswrapper[4755]: I1124 01:29:29.856977 4755 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f1e20cc1-51ab-439a-ac82-00412ac8a862-etc-machine-id\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:29 crc kubenswrapper[4755]: I1124 01:29:29.856987 4755 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f1e20cc1-51ab-439a-ac82-00412ac8a862-logs\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:29 crc kubenswrapper[4755]: I1124 01:29:29.856998 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f1e20cc1-51ab-439a-ac82-00412ac8a862-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:29 crc kubenswrapper[4755]: I1124 01:29:29.857009 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1e20cc1-51ab-439a-ac82-00412ac8a862-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:29 crc kubenswrapper[4755]: I1124 01:29:29.857019 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1e20cc1-51ab-439a-ac82-00412ac8a862-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.068985 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.086679 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.095222 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Nov 24 01:29:30 crc kubenswrapper[4755]: E1124 01:29:30.095659 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1e20cc1-51ab-439a-ac82-00412ac8a862" containerName="cinder-api" Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.095675 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1e20cc1-51ab-439a-ac82-00412ac8a862" containerName="cinder-api" Nov 24 01:29:30 crc kubenswrapper[4755]: E1124 01:29:30.095717 4755 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="f1e20cc1-51ab-439a-ac82-00412ac8a862" containerName="cinder-api-log" Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.095724 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1e20cc1-51ab-439a-ac82-00412ac8a862" containerName="cinder-api-log" Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.095888 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1e20cc1-51ab-439a-ac82-00412ac8a862" containerName="cinder-api-log" Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.095907 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1e20cc1-51ab-439a-ac82-00412ac8a862" containerName="cinder-api" Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.096847 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.101547 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.102376 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.102731 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.106001 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.170890 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a7aabf3f-133b-4bdf-86e6-9fb76e89d076-public-tls-certs\") pod \"cinder-api-0\" (UID: \"a7aabf3f-133b-4bdf-86e6-9fb76e89d076\") " pod="openstack/cinder-api-0" Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.170932 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a7aabf3f-133b-4bdf-86e6-9fb76e89d076-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"a7aabf3f-133b-4bdf-86e6-9fb76e89d076\") " pod="openstack/cinder-api-0" Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.170952 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7aabf3f-133b-4bdf-86e6-9fb76e89d076-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"a7aabf3f-133b-4bdf-86e6-9fb76e89d076\") " pod="openstack/cinder-api-0" Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.170969 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a7aabf3f-133b-4bdf-86e6-9fb76e89d076-etc-machine-id\") pod \"cinder-api-0\" (UID: \"a7aabf3f-133b-4bdf-86e6-9fb76e89d076\") " pod="openstack/cinder-api-0" Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.171247 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7aabf3f-133b-4bdf-86e6-9fb76e89d076-config-data\") pod \"cinder-api-0\" (UID: \"a7aabf3f-133b-4bdf-86e6-9fb76e89d076\") " pod="openstack/cinder-api-0" Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.171347 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-5sqzd\" (UniqueName: \"kubernetes.io/projected/a7aabf3f-133b-4bdf-86e6-9fb76e89d076-kube-api-access-5sqzd\") pod \"cinder-api-0\" (UID: \"a7aabf3f-133b-4bdf-86e6-9fb76e89d076\") " pod="openstack/cinder-api-0" Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.171440 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7aabf3f-133b-4bdf-86e6-9fb76e89d076-scripts\") pod \"cinder-api-0\" (UID: \"a7aabf3f-133b-4bdf-86e6-9fb76e89d076\") " pod="openstack/cinder-api-0" Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.171478 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a7aabf3f-133b-4bdf-86e6-9fb76e89d076-logs\") pod \"cinder-api-0\" (UID: \"a7aabf3f-133b-4bdf-86e6-9fb76e89d076\") " pod="openstack/cinder-api-0" Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.171505 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a7aabf3f-133b-4bdf-86e6-9fb76e89d076-config-data-custom\") pod \"cinder-api-0\" (UID: \"a7aabf3f-133b-4bdf-86e6-9fb76e89d076\") " pod="openstack/cinder-api-0" Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.221460 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.272998 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5sqzd\" (UniqueName: \"kubernetes.io/projected/a7aabf3f-133b-4bdf-86e6-9fb76e89d076-kube-api-access-5sqzd\") pod \"cinder-api-0\" (UID: \"a7aabf3f-133b-4bdf-86e6-9fb76e89d076\") " pod="openstack/cinder-api-0" Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.273269 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7aabf3f-133b-4bdf-86e6-9fb76e89d076-scripts\") pod \"cinder-api-0\" (UID: \"a7aabf3f-133b-4bdf-86e6-9fb76e89d076\") " pod="openstack/cinder-api-0" Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.273297 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a7aabf3f-133b-4bdf-86e6-9fb76e89d076-logs\") pod \"cinder-api-0\" (UID: \"a7aabf3f-133b-4bdf-86e6-9fb76e89d076\") " pod="openstack/cinder-api-0" Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.273317 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a7aabf3f-133b-4bdf-86e6-9fb76e89d076-config-data-custom\") pod \"cinder-api-0\" (UID: \"a7aabf3f-133b-4bdf-86e6-9fb76e89d076\") " pod="openstack/cinder-api-0" Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.273367 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a7aabf3f-133b-4bdf-86e6-9fb76e89d076-public-tls-certs\") pod \"cinder-api-0\" (UID: \"a7aabf3f-133b-4bdf-86e6-9fb76e89d076\") " pod="openstack/cinder-api-0" Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.273385 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a7aabf3f-133b-4bdf-86e6-9fb76e89d076-internal-tls-certs\") pod \"cinder-api-0\" (UID: 
\"a7aabf3f-133b-4bdf-86e6-9fb76e89d076\") " pod="openstack/cinder-api-0" Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.273410 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7aabf3f-133b-4bdf-86e6-9fb76e89d076-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"a7aabf3f-133b-4bdf-86e6-9fb76e89d076\") " pod="openstack/cinder-api-0" Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.273431 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a7aabf3f-133b-4bdf-86e6-9fb76e89d076-etc-machine-id\") pod \"cinder-api-0\" (UID: \"a7aabf3f-133b-4bdf-86e6-9fb76e89d076\") " pod="openstack/cinder-api-0" Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.273512 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7aabf3f-133b-4bdf-86e6-9fb76e89d076-config-data\") pod \"cinder-api-0\" (UID: \"a7aabf3f-133b-4bdf-86e6-9fb76e89d076\") " pod="openstack/cinder-api-0" Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.273739 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a7aabf3f-133b-4bdf-86e6-9fb76e89d076-etc-machine-id\") pod \"cinder-api-0\" (UID: \"a7aabf3f-133b-4bdf-86e6-9fb76e89d076\") " pod="openstack/cinder-api-0" Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.274925 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a7aabf3f-133b-4bdf-86e6-9fb76e89d076-logs\") pod \"cinder-api-0\" (UID: \"a7aabf3f-133b-4bdf-86e6-9fb76e89d076\") " pod="openstack/cinder-api-0" Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.278643 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a7aabf3f-133b-4bdf-86e6-9fb76e89d076-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"a7aabf3f-133b-4bdf-86e6-9fb76e89d076\") " pod="openstack/cinder-api-0" Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.279062 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7aabf3f-133b-4bdf-86e6-9fb76e89d076-scripts\") pod \"cinder-api-0\" (UID: \"a7aabf3f-133b-4bdf-86e6-9fb76e89d076\") " pod="openstack/cinder-api-0" Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.279096 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7aabf3f-133b-4bdf-86e6-9fb76e89d076-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"a7aabf3f-133b-4bdf-86e6-9fb76e89d076\") " pod="openstack/cinder-api-0" Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.279264 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a7aabf3f-133b-4bdf-86e6-9fb76e89d076-public-tls-certs\") pod \"cinder-api-0\" (UID: \"a7aabf3f-133b-4bdf-86e6-9fb76e89d076\") " pod="openstack/cinder-api-0" Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.280164 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a7aabf3f-133b-4bdf-86e6-9fb76e89d076-config-data-custom\") pod \"cinder-api-0\" (UID: \"a7aabf3f-133b-4bdf-86e6-9fb76e89d076\") " 
pod="openstack/cinder-api-0" Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.291194 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7aabf3f-133b-4bdf-86e6-9fb76e89d076-config-data\") pod \"cinder-api-0\" (UID: \"a7aabf3f-133b-4bdf-86e6-9fb76e89d076\") " pod="openstack/cinder-api-0" Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.297518 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5sqzd\" (UniqueName: \"kubernetes.io/projected/a7aabf3f-133b-4bdf-86e6-9fb76e89d076-kube-api-access-5sqzd\") pod \"cinder-api-0\" (UID: \"a7aabf3f-133b-4bdf-86e6-9fb76e89d076\") " pod="openstack/cinder-api-0" Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.424980 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.607094 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-74db9f6546-bvfjf" podUID="3431a799-d534-4932-8dad-1d2e49a74737" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.161:9311/healthcheck\": read tcp 10.217.0.2:41284->10.217.0.161:9311: read: connection reset by peer" Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.607615 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-74db9f6546-bvfjf" podUID="3431a799-d534-4932-8dad-1d2e49a74737" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.161:9311/healthcheck\": read tcp 10.217.0.2:41282->10.217.0.161:9311: read: connection reset by peer" Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.705257 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-55cf755d8-2cns2" Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.756078 4755 generic.go:334] "Generic (PLEG): container finished" podID="3431a799-d534-4932-8dad-1d2e49a74737" containerID="aeaf273448fbf66b722ad8b0293f2b8c3db5a12c2ebcf4b4fb4b1cc41b2e2376" exitCode=0 Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.756115 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-74db9f6546-bvfjf" event={"ID":"3431a799-d534-4932-8dad-1d2e49a74737","Type":"ContainerDied","Data":"aeaf273448fbf66b722ad8b0293f2b8c3db5a12c2ebcf4b4fb4b1cc41b2e2376"} Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.767254 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-75d8fb7cd4-vbxkn" Nov 24 01:29:30 crc kubenswrapper[4755]: I1124 01:29:30.939018 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Nov 24 01:29:31 crc kubenswrapper[4755]: I1124 01:29:31.113793 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-74db9f6546-bvfjf" Nov 24 01:29:31 crc kubenswrapper[4755]: I1124 01:29:31.191005 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3431a799-d534-4932-8dad-1d2e49a74737-combined-ca-bundle\") pod \"3431a799-d534-4932-8dad-1d2e49a74737\" (UID: \"3431a799-d534-4932-8dad-1d2e49a74737\") " Nov 24 01:29:31 crc kubenswrapper[4755]: I1124 01:29:31.191064 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3431a799-d534-4932-8dad-1d2e49a74737-config-data\") pod \"3431a799-d534-4932-8dad-1d2e49a74737\" (UID: \"3431a799-d534-4932-8dad-1d2e49a74737\") " Nov 24 01:29:31 crc kubenswrapper[4755]: I1124 01:29:31.191126 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-crxp8\" (UniqueName: \"kubernetes.io/projected/3431a799-d534-4932-8dad-1d2e49a74737-kube-api-access-crxp8\") pod \"3431a799-d534-4932-8dad-1d2e49a74737\" (UID: \"3431a799-d534-4932-8dad-1d2e49a74737\") " Nov 24 01:29:31 crc kubenswrapper[4755]: I1124 01:29:31.191333 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3431a799-d534-4932-8dad-1d2e49a74737-config-data-custom\") pod \"3431a799-d534-4932-8dad-1d2e49a74737\" (UID: \"3431a799-d534-4932-8dad-1d2e49a74737\") " Nov 24 01:29:31 crc kubenswrapper[4755]: I1124 01:29:31.191430 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3431a799-d534-4932-8dad-1d2e49a74737-logs\") pod \"3431a799-d534-4932-8dad-1d2e49a74737\" (UID: \"3431a799-d534-4932-8dad-1d2e49a74737\") " Nov 24 01:29:31 crc kubenswrapper[4755]: I1124 01:29:31.192516 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3431a799-d534-4932-8dad-1d2e49a74737-logs" (OuterVolumeSpecName: "logs") pod "3431a799-d534-4932-8dad-1d2e49a74737" (UID: "3431a799-d534-4932-8dad-1d2e49a74737"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:29:31 crc kubenswrapper[4755]: I1124 01:29:31.197706 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3431a799-d534-4932-8dad-1d2e49a74737-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "3431a799-d534-4932-8dad-1d2e49a74737" (UID: "3431a799-d534-4932-8dad-1d2e49a74737"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:29:31 crc kubenswrapper[4755]: I1124 01:29:31.199328 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3431a799-d534-4932-8dad-1d2e49a74737-kube-api-access-crxp8" (OuterVolumeSpecName: "kube-api-access-crxp8") pod "3431a799-d534-4932-8dad-1d2e49a74737" (UID: "3431a799-d534-4932-8dad-1d2e49a74737"). InnerVolumeSpecName "kube-api-access-crxp8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:29:31 crc kubenswrapper[4755]: I1124 01:29:31.222924 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3431a799-d534-4932-8dad-1d2e49a74737-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3431a799-d534-4932-8dad-1d2e49a74737" (UID: "3431a799-d534-4932-8dad-1d2e49a74737"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:29:31 crc kubenswrapper[4755]: I1124 01:29:31.253714 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3431a799-d534-4932-8dad-1d2e49a74737-config-data" (OuterVolumeSpecName: "config-data") pod "3431a799-d534-4932-8dad-1d2e49a74737" (UID: "3431a799-d534-4932-8dad-1d2e49a74737"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:29:31 crc kubenswrapper[4755]: I1124 01:29:31.293404 4755 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3431a799-d534-4932-8dad-1d2e49a74737-logs\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:31 crc kubenswrapper[4755]: I1124 01:29:31.293435 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3431a799-d534-4932-8dad-1d2e49a74737-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:31 crc kubenswrapper[4755]: I1124 01:29:31.293444 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3431a799-d534-4932-8dad-1d2e49a74737-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:31 crc kubenswrapper[4755]: I1124 01:29:31.293454 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-crxp8\" (UniqueName: \"kubernetes.io/projected/3431a799-d534-4932-8dad-1d2e49a74737-kube-api-access-crxp8\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:31 crc kubenswrapper[4755]: I1124 01:29:31.293462 4755 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3431a799-d534-4932-8dad-1d2e49a74737-config-data-custom\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:31 crc kubenswrapper[4755]: I1124 01:29:31.769890 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a7aabf3f-133b-4bdf-86e6-9fb76e89d076","Type":"ContainerStarted","Data":"bc5b5125d10097bac4a5061b824ab73cd5f011309a32279ae1ed958cf0189cc5"} Nov 24 01:29:31 crc kubenswrapper[4755]: I1124 01:29:31.770257 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a7aabf3f-133b-4bdf-86e6-9fb76e89d076","Type":"ContainerStarted","Data":"7591ff52c5c5e9065dd5226577bdb2d284f0d4e78f75237d5a2cb616b3e43986"} Nov 24 01:29:31 crc kubenswrapper[4755]: I1124 01:29:31.773071 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-74db9f6546-bvfjf" event={"ID":"3431a799-d534-4932-8dad-1d2e49a74737","Type":"ContainerDied","Data":"d1ae01f482282900ff8faadb5917d479255e042fd079b50fc0c9745060d8ac35"} Nov 24 01:29:31 crc kubenswrapper[4755]: I1124 01:29:31.773133 4755 scope.go:117] "RemoveContainer" containerID="aeaf273448fbf66b722ad8b0293f2b8c3db5a12c2ebcf4b4fb4b1cc41b2e2376" Nov 24 01:29:31 crc kubenswrapper[4755]: I1124 01:29:31.773273 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-74db9f6546-bvfjf" Nov 24 01:29:31 crc kubenswrapper[4755]: I1124 01:29:31.810085 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-74db9f6546-bvfjf"] Nov 24 01:29:31 crc kubenswrapper[4755]: I1124 01:29:31.817296 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-74db9f6546-bvfjf"] Nov 24 01:29:31 crc kubenswrapper[4755]: I1124 01:29:31.818852 4755 scope.go:117] "RemoveContainer" containerID="90d95af224e73d08f34b63b67fcaaff30b00be6e3dc0ac0c80c995f916d60e3c" Nov 24 01:29:32 crc kubenswrapper[4755]: I1124 01:29:32.008899 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3431a799-d534-4932-8dad-1d2e49a74737" path="/var/lib/kubelet/pods/3431a799-d534-4932-8dad-1d2e49a74737/volumes" Nov 24 01:29:32 crc kubenswrapper[4755]: I1124 01:29:32.009567 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f1e20cc1-51ab-439a-ac82-00412ac8a862" path="/var/lib/kubelet/pods/f1e20cc1-51ab-439a-ac82-00412ac8a862/volumes" Nov 24 01:29:32 crc kubenswrapper[4755]: I1124 01:29:32.031316 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-765f9bdf9-sx8ch" Nov 24 01:29:32 crc kubenswrapper[4755]: I1124 01:29:32.095370 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-55bcc7b6cd-xzjdz"] Nov 24 01:29:32 crc kubenswrapper[4755]: I1124 01:29:32.098359 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-55bcc7b6cd-xzjdz" podUID="fbb6d8d0-981e-4309-a478-1f07ea12d6c7" containerName="neutron-api" containerID="cri-o://bcf0488860390ca50c5aa5b2512ecdca965b8aa8f5e6379eb739b2408cb67c84" gracePeriod=30 Nov 24 01:29:32 crc kubenswrapper[4755]: I1124 01:29:32.098757 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-55bcc7b6cd-xzjdz" podUID="fbb6d8d0-981e-4309-a478-1f07ea12d6c7" containerName="neutron-httpd" containerID="cri-o://bc1a0c840395721f8fa147c4be20748da99022eabfad9c5a5875326fc32d6e21" gracePeriod=30 Nov 24 01:29:32 crc kubenswrapper[4755]: I1124 01:29:32.466451 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-75d8fb7cd4-vbxkn" Nov 24 01:29:32 crc kubenswrapper[4755]: I1124 01:29:32.526122 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-55cf755d8-2cns2"] Nov 24 01:29:32 crc kubenswrapper[4755]: I1124 01:29:32.526349 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-55cf755d8-2cns2" podUID="dc7447cb-d4c3-48d1-8cd3-065d5eee21cb" containerName="horizon-log" containerID="cri-o://1d442e32d84d4e2cff3318b61b7cba05d68456587c4ceec4d872c6c06fa3bf99" gracePeriod=30 Nov 24 01:29:32 crc kubenswrapper[4755]: I1124 01:29:32.526482 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-55cf755d8-2cns2" podUID="dc7447cb-d4c3-48d1-8cd3-065d5eee21cb" containerName="horizon" containerID="cri-o://4beef73f15aa24bc366a07ee6b65668580a078af7defa6b5c6d228dcbe01790b" gracePeriod=30 Nov 24 01:29:32 crc kubenswrapper[4755]: I1124 01:29:32.542760 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-55cf755d8-2cns2" podUID="dc7447cb-d4c3-48d1-8cd3-065d5eee21cb" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.148:8443/dashboard/auth/login/?next=/dashboard/\": EOF" Nov 24 01:29:32 crc kubenswrapper[4755]: 
I1124 01:29:32.783325 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a7aabf3f-133b-4bdf-86e6-9fb76e89d076","Type":"ContainerStarted","Data":"49fca86da78466992a28bbe5e3faf69e01d21c272ad8ae0054938075122b07b8"} Nov 24 01:29:32 crc kubenswrapper[4755]: I1124 01:29:32.783441 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Nov 24 01:29:32 crc kubenswrapper[4755]: I1124 01:29:32.786561 4755 generic.go:334] "Generic (PLEG): container finished" podID="fbb6d8d0-981e-4309-a478-1f07ea12d6c7" containerID="bc1a0c840395721f8fa147c4be20748da99022eabfad9c5a5875326fc32d6e21" exitCode=0 Nov 24 01:29:32 crc kubenswrapper[4755]: I1124 01:29:32.786640 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-55bcc7b6cd-xzjdz" event={"ID":"fbb6d8d0-981e-4309-a478-1f07ea12d6c7","Type":"ContainerDied","Data":"bc1a0c840395721f8fa147c4be20748da99022eabfad9c5a5875326fc32d6e21"} Nov 24 01:29:32 crc kubenswrapper[4755]: I1124 01:29:32.811200 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=2.811182627 podStartE2EDuration="2.811182627s" podCreationTimestamp="2025-11-24 01:29:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:29:32.800310987 +0000 UTC m=+997.486376538" watchObservedRunningTime="2025-11-24 01:29:32.811182627 +0000 UTC m=+997.497248128" Nov 24 01:29:35 crc kubenswrapper[4755]: I1124 01:29:35.289021 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5784cf869f-9bcwp" Nov 24 01:29:35 crc kubenswrapper[4755]: I1124 01:29:35.387044 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-5xwvl"] Nov 24 01:29:35 crc kubenswrapper[4755]: I1124 01:29:35.387421 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-75c8ddd69c-5xwvl" podUID="2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21" containerName="dnsmasq-dns" containerID="cri-o://94d632be6382cb5836e89b418047b89ca951ebfa9566582dfdc70d9595841609" gracePeriod=10 Nov 24 01:29:35 crc kubenswrapper[4755]: I1124 01:29:35.559751 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Nov 24 01:29:35 crc kubenswrapper[4755]: I1124 01:29:35.616453 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 24 01:29:35 crc kubenswrapper[4755]: I1124 01:29:35.667777 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-55cf755d8-2cns2" podUID="dc7447cb-d4c3-48d1-8cd3-065d5eee21cb" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.148:8443/dashboard/auth/login/?next=/dashboard/\": read tcp 10.217.0.2:44336->10.217.0.148:8443: read: connection reset by peer" Nov 24 01:29:35 crc kubenswrapper[4755]: I1124 01:29:35.844109 4755 generic.go:334] "Generic (PLEG): container finished" podID="2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21" containerID="94d632be6382cb5836e89b418047b89ca951ebfa9566582dfdc70d9595841609" exitCode=0 Nov 24 01:29:35 crc kubenswrapper[4755]: I1124 01:29:35.844194 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-5xwvl" event={"ID":"2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21","Type":"ContainerDied","Data":"94d632be6382cb5836e89b418047b89ca951ebfa9566582dfdc70d9595841609"} Nov 24 
01:29:35 crc kubenswrapper[4755]: I1124 01:29:35.844279 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-5xwvl" event={"ID":"2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21","Type":"ContainerDied","Data":"37512cf43e6678242b8230c766cd97b89a35fb63b23f34d7b92d3301ace34457"} Nov 24 01:29:35 crc kubenswrapper[4755]: I1124 01:29:35.844298 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="37512cf43e6678242b8230c766cd97b89a35fb63b23f34d7b92d3301ace34457" Nov 24 01:29:35 crc kubenswrapper[4755]: I1124 01:29:35.850928 4755 generic.go:334] "Generic (PLEG): container finished" podID="dc7447cb-d4c3-48d1-8cd3-065d5eee21cb" containerID="4beef73f15aa24bc366a07ee6b65668580a078af7defa6b5c6d228dcbe01790b" exitCode=0 Nov 24 01:29:35 crc kubenswrapper[4755]: I1124 01:29:35.850979 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-55cf755d8-2cns2" event={"ID":"dc7447cb-d4c3-48d1-8cd3-065d5eee21cb","Type":"ContainerDied","Data":"4beef73f15aa24bc366a07ee6b65668580a078af7defa6b5c6d228dcbe01790b"} Nov 24 01:29:35 crc kubenswrapper[4755]: I1124 01:29:35.851180 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="da14d080-1117-42b6-b8d3-dfb22ef83267" containerName="cinder-scheduler" containerID="cri-o://24859f1d8e28b44d051332ce0047b14bde88b3f637227af992fa9be9432c362a" gracePeriod=30 Nov 24 01:29:35 crc kubenswrapper[4755]: I1124 01:29:35.851248 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="da14d080-1117-42b6-b8d3-dfb22ef83267" containerName="probe" containerID="cri-o://769c88939eb37c7a60ba2e21fae48dac3c92d89f49d03b30bb0a32b946de6c91" gracePeriod=30 Nov 24 01:29:35 crc kubenswrapper[4755]: I1124 01:29:35.896047 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-5xwvl" Nov 24 01:29:35 crc kubenswrapper[4755]: I1124 01:29:35.983798 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21-dns-svc\") pod \"2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21\" (UID: \"2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21\") " Nov 24 01:29:35 crc kubenswrapper[4755]: I1124 01:29:35.983882 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mqrm5\" (UniqueName: \"kubernetes.io/projected/2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21-kube-api-access-mqrm5\") pod \"2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21\" (UID: \"2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21\") " Nov 24 01:29:35 crc kubenswrapper[4755]: I1124 01:29:35.983904 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21-config\") pod \"2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21\" (UID: \"2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21\") " Nov 24 01:29:35 crc kubenswrapper[4755]: I1124 01:29:35.983918 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21-dns-swift-storage-0\") pod \"2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21\" (UID: \"2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21\") " Nov 24 01:29:35 crc kubenswrapper[4755]: I1124 01:29:35.983972 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21-ovsdbserver-sb\") pod \"2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21\" (UID: \"2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21\") " Nov 24 01:29:35 crc kubenswrapper[4755]: I1124 01:29:35.984015 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21-ovsdbserver-nb\") pod \"2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21\" (UID: \"2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21\") " Nov 24 01:29:35 crc kubenswrapper[4755]: I1124 01:29:35.989610 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21-kube-api-access-mqrm5" (OuterVolumeSpecName: "kube-api-access-mqrm5") pod "2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21" (UID: "2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21"). InnerVolumeSpecName "kube-api-access-mqrm5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:29:36 crc kubenswrapper[4755]: I1124 01:29:36.041989 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21" (UID: "2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:29:36 crc kubenswrapper[4755]: I1124 01:29:36.046401 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21-config" (OuterVolumeSpecName: "config") pod "2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21" (UID: "2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:29:36 crc kubenswrapper[4755]: I1124 01:29:36.048335 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21" (UID: "2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:29:36 crc kubenswrapper[4755]: I1124 01:29:36.051886 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21" (UID: "2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:29:36 crc kubenswrapper[4755]: I1124 01:29:36.070105 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21" (UID: "2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:29:36 crc kubenswrapper[4755]: I1124 01:29:36.086896 4755 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:36 crc kubenswrapper[4755]: I1124 01:29:36.086933 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mqrm5\" (UniqueName: \"kubernetes.io/projected/2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21-kube-api-access-mqrm5\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:36 crc kubenswrapper[4755]: I1124 01:29:36.086947 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:36 crc kubenswrapper[4755]: I1124 01:29:36.086959 4755 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:36 crc kubenswrapper[4755]: I1124 01:29:36.086973 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:36 crc kubenswrapper[4755]: I1124 01:29:36.086987 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:36 crc kubenswrapper[4755]: I1124 01:29:36.872955 4755 generic.go:334] "Generic (PLEG): container finished" podID="da14d080-1117-42b6-b8d3-dfb22ef83267" containerID="769c88939eb37c7a60ba2e21fae48dac3c92d89f49d03b30bb0a32b946de6c91" exitCode=0 Nov 24 01:29:36 crc kubenswrapper[4755]: I1124 01:29:36.873033 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"da14d080-1117-42b6-b8d3-dfb22ef83267","Type":"ContainerDied","Data":"769c88939eb37c7a60ba2e21fae48dac3c92d89f49d03b30bb0a32b946de6c91"} Nov 24 01:29:36 
crc kubenswrapper[4755]: I1124 01:29:36.873302 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-5xwvl" Nov 24 01:29:36 crc kubenswrapper[4755]: I1124 01:29:36.903930 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-5xwvl"] Nov 24 01:29:36 crc kubenswrapper[4755]: I1124 01:29:36.912917 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-5xwvl"] Nov 24 01:29:38 crc kubenswrapper[4755]: I1124 01:29:38.010933 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21" path="/var/lib/kubelet/pods/2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21/volumes" Nov 24 01:29:38 crc kubenswrapper[4755]: I1124 01:29:38.258182 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-55cf755d8-2cns2" podUID="dc7447cb-d4c3-48d1-8cd3-065d5eee21cb" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.148:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.148:8443: connect: connection refused" Nov 24 01:29:38 crc kubenswrapper[4755]: I1124 01:29:38.899849 4755 generic.go:334] "Generic (PLEG): container finished" podID="da14d080-1117-42b6-b8d3-dfb22ef83267" containerID="24859f1d8e28b44d051332ce0047b14bde88b3f637227af992fa9be9432c362a" exitCode=0 Nov 24 01:29:38 crc kubenswrapper[4755]: I1124 01:29:38.899927 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"da14d080-1117-42b6-b8d3-dfb22ef83267","Type":"ContainerDied","Data":"24859f1d8e28b44d051332ce0047b14bde88b3f637227af992fa9be9432c362a"} Nov 24 01:29:38 crc kubenswrapper[4755]: I1124 01:29:38.985308 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 24 01:29:39 crc kubenswrapper[4755]: I1124 01:29:39.043232 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/da14d080-1117-42b6-b8d3-dfb22ef83267-scripts\") pod \"da14d080-1117-42b6-b8d3-dfb22ef83267\" (UID: \"da14d080-1117-42b6-b8d3-dfb22ef83267\") " Nov 24 01:29:39 crc kubenswrapper[4755]: I1124 01:29:39.043284 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kxtdj\" (UniqueName: \"kubernetes.io/projected/da14d080-1117-42b6-b8d3-dfb22ef83267-kube-api-access-kxtdj\") pod \"da14d080-1117-42b6-b8d3-dfb22ef83267\" (UID: \"da14d080-1117-42b6-b8d3-dfb22ef83267\") " Nov 24 01:29:39 crc kubenswrapper[4755]: I1124 01:29:39.043322 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da14d080-1117-42b6-b8d3-dfb22ef83267-combined-ca-bundle\") pod \"da14d080-1117-42b6-b8d3-dfb22ef83267\" (UID: \"da14d080-1117-42b6-b8d3-dfb22ef83267\") " Nov 24 01:29:39 crc kubenswrapper[4755]: I1124 01:29:39.043338 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/da14d080-1117-42b6-b8d3-dfb22ef83267-etc-machine-id\") pod \"da14d080-1117-42b6-b8d3-dfb22ef83267\" (UID: \"da14d080-1117-42b6-b8d3-dfb22ef83267\") " Nov 24 01:29:39 crc kubenswrapper[4755]: I1124 01:29:39.043442 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/da14d080-1117-42b6-b8d3-dfb22ef83267-config-data-custom\") pod \"da14d080-1117-42b6-b8d3-dfb22ef83267\" (UID: \"da14d080-1117-42b6-b8d3-dfb22ef83267\") " Nov 24 01:29:39 crc kubenswrapper[4755]: I1124 01:29:39.043508 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/da14d080-1117-42b6-b8d3-dfb22ef83267-config-data\") pod \"da14d080-1117-42b6-b8d3-dfb22ef83267\" (UID: \"da14d080-1117-42b6-b8d3-dfb22ef83267\") " Nov 24 01:29:39 crc kubenswrapper[4755]: I1124 01:29:39.044706 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/da14d080-1117-42b6-b8d3-dfb22ef83267-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "da14d080-1117-42b6-b8d3-dfb22ef83267" (UID: "da14d080-1117-42b6-b8d3-dfb22ef83267"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 01:29:39 crc kubenswrapper[4755]: I1124 01:29:39.052141 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/da14d080-1117-42b6-b8d3-dfb22ef83267-scripts" (OuterVolumeSpecName: "scripts") pod "da14d080-1117-42b6-b8d3-dfb22ef83267" (UID: "da14d080-1117-42b6-b8d3-dfb22ef83267"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:29:39 crc kubenswrapper[4755]: I1124 01:29:39.053930 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/da14d080-1117-42b6-b8d3-dfb22ef83267-kube-api-access-kxtdj" (OuterVolumeSpecName: "kube-api-access-kxtdj") pod "da14d080-1117-42b6-b8d3-dfb22ef83267" (UID: "da14d080-1117-42b6-b8d3-dfb22ef83267"). InnerVolumeSpecName "kube-api-access-kxtdj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:29:39 crc kubenswrapper[4755]: I1124 01:29:39.066757 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/da14d080-1117-42b6-b8d3-dfb22ef83267-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "da14d080-1117-42b6-b8d3-dfb22ef83267" (UID: "da14d080-1117-42b6-b8d3-dfb22ef83267"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:29:39 crc kubenswrapper[4755]: I1124 01:29:39.129632 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/da14d080-1117-42b6-b8d3-dfb22ef83267-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "da14d080-1117-42b6-b8d3-dfb22ef83267" (UID: "da14d080-1117-42b6-b8d3-dfb22ef83267"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:29:39 crc kubenswrapper[4755]: I1124 01:29:39.145731 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/da14d080-1117-42b6-b8d3-dfb22ef83267-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:39 crc kubenswrapper[4755]: I1124 01:29:39.145762 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kxtdj\" (UniqueName: \"kubernetes.io/projected/da14d080-1117-42b6-b8d3-dfb22ef83267-kube-api-access-kxtdj\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:39 crc kubenswrapper[4755]: I1124 01:29:39.145775 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da14d080-1117-42b6-b8d3-dfb22ef83267-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:39 crc kubenswrapper[4755]: I1124 01:29:39.145783 4755 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/da14d080-1117-42b6-b8d3-dfb22ef83267-etc-machine-id\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:39 crc kubenswrapper[4755]: I1124 01:29:39.145791 4755 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/da14d080-1117-42b6-b8d3-dfb22ef83267-config-data-custom\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:39 crc kubenswrapper[4755]: I1124 01:29:39.209940 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/da14d080-1117-42b6-b8d3-dfb22ef83267-config-data" (OuterVolumeSpecName: "config-data") pod "da14d080-1117-42b6-b8d3-dfb22ef83267" (UID: "da14d080-1117-42b6-b8d3-dfb22ef83267"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:29:39 crc kubenswrapper[4755]: I1124 01:29:39.247410 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/da14d080-1117-42b6-b8d3-dfb22ef83267-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:39 crc kubenswrapper[4755]: I1124 01:29:39.916486 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"da14d080-1117-42b6-b8d3-dfb22ef83267","Type":"ContainerDied","Data":"c78a0aa0642445b7f9a1e40a06edd8792643adf103cbd1cee40f8d5dd5e51c33"} Nov 24 01:29:39 crc kubenswrapper[4755]: I1124 01:29:39.917725 4755 scope.go:117] "RemoveContainer" containerID="769c88939eb37c7a60ba2e21fae48dac3c92d89f49d03b30bb0a32b946de6c91" Nov 24 01:29:39 crc kubenswrapper[4755]: I1124 01:29:39.917997 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 24 01:29:39 crc kubenswrapper[4755]: I1124 01:29:39.948229 4755 scope.go:117] "RemoveContainer" containerID="24859f1d8e28b44d051332ce0047b14bde88b3f637227af992fa9be9432c362a" Nov 24 01:29:39 crc kubenswrapper[4755]: I1124 01:29:39.990735 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 24 01:29:40 crc kubenswrapper[4755]: I1124 01:29:40.016816 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 24 01:29:40 crc kubenswrapper[4755]: I1124 01:29:40.016858 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Nov 24 01:29:40 crc kubenswrapper[4755]: E1124 01:29:40.017096 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21" containerName="init" Nov 24 01:29:40 crc kubenswrapper[4755]: I1124 01:29:40.017113 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21" containerName="init" Nov 24 01:29:40 crc kubenswrapper[4755]: E1124 01:29:40.017126 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3431a799-d534-4932-8dad-1d2e49a74737" containerName="barbican-api-log" Nov 24 01:29:40 crc kubenswrapper[4755]: I1124 01:29:40.017133 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="3431a799-d534-4932-8dad-1d2e49a74737" containerName="barbican-api-log" Nov 24 01:29:40 crc kubenswrapper[4755]: E1124 01:29:40.017152 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21" containerName="dnsmasq-dns" Nov 24 01:29:40 crc kubenswrapper[4755]: I1124 01:29:40.017157 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21" containerName="dnsmasq-dns" Nov 24 01:29:40 crc kubenswrapper[4755]: E1124 01:29:40.017169 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da14d080-1117-42b6-b8d3-dfb22ef83267" containerName="cinder-scheduler" Nov 24 01:29:40 crc kubenswrapper[4755]: I1124 01:29:40.017176 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="da14d080-1117-42b6-b8d3-dfb22ef83267" containerName="cinder-scheduler" Nov 24 01:29:40 crc kubenswrapper[4755]: E1124 01:29:40.017196 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da14d080-1117-42b6-b8d3-dfb22ef83267" containerName="probe" Nov 24 01:29:40 crc kubenswrapper[4755]: I1124 01:29:40.017203 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="da14d080-1117-42b6-b8d3-dfb22ef83267" containerName="probe" Nov 
24 01:29:40 crc kubenswrapper[4755]: E1124 01:29:40.017224 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3431a799-d534-4932-8dad-1d2e49a74737" containerName="barbican-api" Nov 24 01:29:40 crc kubenswrapper[4755]: I1124 01:29:40.017229 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="3431a799-d534-4932-8dad-1d2e49a74737" containerName="barbican-api" Nov 24 01:29:40 crc kubenswrapper[4755]: I1124 01:29:40.017378 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="3431a799-d534-4932-8dad-1d2e49a74737" containerName="barbican-api-log" Nov 24 01:29:40 crc kubenswrapper[4755]: I1124 01:29:40.017388 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="2cd4ba5f-a6b8-4ccd-9053-5bc65a366e21" containerName="dnsmasq-dns" Nov 24 01:29:40 crc kubenswrapper[4755]: I1124 01:29:40.017396 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="3431a799-d534-4932-8dad-1d2e49a74737" containerName="barbican-api" Nov 24 01:29:40 crc kubenswrapper[4755]: I1124 01:29:40.017407 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="da14d080-1117-42b6-b8d3-dfb22ef83267" containerName="cinder-scheduler" Nov 24 01:29:40 crc kubenswrapper[4755]: I1124 01:29:40.017414 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="da14d080-1117-42b6-b8d3-dfb22ef83267" containerName="probe" Nov 24 01:29:40 crc kubenswrapper[4755]: I1124 01:29:40.018289 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 24 01:29:40 crc kubenswrapper[4755]: I1124 01:29:40.022090 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Nov 24 01:29:40 crc kubenswrapper[4755]: I1124 01:29:40.052721 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 24 01:29:40 crc kubenswrapper[4755]: I1124 01:29:40.065957 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tpg2b\" (UniqueName: \"kubernetes.io/projected/0404892f-7ed1-4990-a8b1-960e6531b017-kube-api-access-tpg2b\") pod \"cinder-scheduler-0\" (UID: \"0404892f-7ed1-4990-a8b1-960e6531b017\") " pod="openstack/cinder-scheduler-0" Nov 24 01:29:40 crc kubenswrapper[4755]: I1124 01:29:40.066053 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0404892f-7ed1-4990-a8b1-960e6531b017-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"0404892f-7ed1-4990-a8b1-960e6531b017\") " pod="openstack/cinder-scheduler-0" Nov 24 01:29:40 crc kubenswrapper[4755]: I1124 01:29:40.066100 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0404892f-7ed1-4990-a8b1-960e6531b017-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"0404892f-7ed1-4990-a8b1-960e6531b017\") " pod="openstack/cinder-scheduler-0" Nov 24 01:29:40 crc kubenswrapper[4755]: I1124 01:29:40.066129 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0404892f-7ed1-4990-a8b1-960e6531b017-config-data\") pod \"cinder-scheduler-0\" (UID: \"0404892f-7ed1-4990-a8b1-960e6531b017\") " pod="openstack/cinder-scheduler-0" Nov 24 01:29:40 crc kubenswrapper[4755]: I1124 01:29:40.066166 4755 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0404892f-7ed1-4990-a8b1-960e6531b017-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"0404892f-7ed1-4990-a8b1-960e6531b017\") " pod="openstack/cinder-scheduler-0" Nov 24 01:29:40 crc kubenswrapper[4755]: I1124 01:29:40.066203 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0404892f-7ed1-4990-a8b1-960e6531b017-scripts\") pod \"cinder-scheduler-0\" (UID: \"0404892f-7ed1-4990-a8b1-960e6531b017\") " pod="openstack/cinder-scheduler-0" Nov 24 01:29:40 crc kubenswrapper[4755]: I1124 01:29:40.167850 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0404892f-7ed1-4990-a8b1-960e6531b017-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"0404892f-7ed1-4990-a8b1-960e6531b017\") " pod="openstack/cinder-scheduler-0" Nov 24 01:29:40 crc kubenswrapper[4755]: I1124 01:29:40.168744 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0404892f-7ed1-4990-a8b1-960e6531b017-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"0404892f-7ed1-4990-a8b1-960e6531b017\") " pod="openstack/cinder-scheduler-0" Nov 24 01:29:40 crc kubenswrapper[4755]: I1124 01:29:40.168786 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0404892f-7ed1-4990-a8b1-960e6531b017-config-data\") pod \"cinder-scheduler-0\" (UID: \"0404892f-7ed1-4990-a8b1-960e6531b017\") " pod="openstack/cinder-scheduler-0" Nov 24 01:29:40 crc kubenswrapper[4755]: I1124 01:29:40.168834 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0404892f-7ed1-4990-a8b1-960e6531b017-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"0404892f-7ed1-4990-a8b1-960e6531b017\") " pod="openstack/cinder-scheduler-0" Nov 24 01:29:40 crc kubenswrapper[4755]: I1124 01:29:40.168923 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0404892f-7ed1-4990-a8b1-960e6531b017-scripts\") pod \"cinder-scheduler-0\" (UID: \"0404892f-7ed1-4990-a8b1-960e6531b017\") " pod="openstack/cinder-scheduler-0" Nov 24 01:29:40 crc kubenswrapper[4755]: I1124 01:29:40.169018 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tpg2b\" (UniqueName: \"kubernetes.io/projected/0404892f-7ed1-4990-a8b1-960e6531b017-kube-api-access-tpg2b\") pod \"cinder-scheduler-0\" (UID: \"0404892f-7ed1-4990-a8b1-960e6531b017\") " pod="openstack/cinder-scheduler-0" Nov 24 01:29:40 crc kubenswrapper[4755]: I1124 01:29:40.169858 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0404892f-7ed1-4990-a8b1-960e6531b017-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"0404892f-7ed1-4990-a8b1-960e6531b017\") " pod="openstack/cinder-scheduler-0" Nov 24 01:29:40 crc kubenswrapper[4755]: I1124 01:29:40.174096 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0404892f-7ed1-4990-a8b1-960e6531b017-config-data-custom\") pod \"cinder-scheduler-0\" (UID: 
\"0404892f-7ed1-4990-a8b1-960e6531b017\") " pod="openstack/cinder-scheduler-0" Nov 24 01:29:40 crc kubenswrapper[4755]: I1124 01:29:40.174563 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0404892f-7ed1-4990-a8b1-960e6531b017-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"0404892f-7ed1-4990-a8b1-960e6531b017\") " pod="openstack/cinder-scheduler-0" Nov 24 01:29:40 crc kubenswrapper[4755]: I1124 01:29:40.176183 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0404892f-7ed1-4990-a8b1-960e6531b017-scripts\") pod \"cinder-scheduler-0\" (UID: \"0404892f-7ed1-4990-a8b1-960e6531b017\") " pod="openstack/cinder-scheduler-0" Nov 24 01:29:40 crc kubenswrapper[4755]: I1124 01:29:40.192150 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tpg2b\" (UniqueName: \"kubernetes.io/projected/0404892f-7ed1-4990-a8b1-960e6531b017-kube-api-access-tpg2b\") pod \"cinder-scheduler-0\" (UID: \"0404892f-7ed1-4990-a8b1-960e6531b017\") " pod="openstack/cinder-scheduler-0" Nov 24 01:29:40 crc kubenswrapper[4755]: I1124 01:29:40.211830 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0404892f-7ed1-4990-a8b1-960e6531b017-config-data\") pod \"cinder-scheduler-0\" (UID: \"0404892f-7ed1-4990-a8b1-960e6531b017\") " pod="openstack/cinder-scheduler-0" Nov 24 01:29:40 crc kubenswrapper[4755]: I1124 01:29:40.273725 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-68b755649b-gdjxt" Nov 24 01:29:40 crc kubenswrapper[4755]: I1124 01:29:40.344216 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 24 01:29:40 crc kubenswrapper[4755]: I1124 01:29:40.564267 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-8674657456-64797" Nov 24 01:29:40 crc kubenswrapper[4755]: I1124 01:29:40.686182 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-8674657456-64797" Nov 24 01:29:40 crc kubenswrapper[4755]: W1124 01:29:40.907428 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0404892f_7ed1_4990_a8b1_960e6531b017.slice/crio-e4c92d9abfe7d396a9d40bea962e55b680727a31a1c3206f7bf2632da57ffc76 WatchSource:0}: Error finding container e4c92d9abfe7d396a9d40bea962e55b680727a31a1c3206f7bf2632da57ffc76: Status 404 returned error can't find the container with id e4c92d9abfe7d396a9d40bea962e55b680727a31a1c3206f7bf2632da57ffc76 Nov 24 01:29:40 crc kubenswrapper[4755]: I1124 01:29:40.919263 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 24 01:29:40 crc kubenswrapper[4755]: I1124 01:29:40.936232 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0404892f-7ed1-4990-a8b1-960e6531b017","Type":"ContainerStarted","Data":"e4c92d9abfe7d396a9d40bea962e55b680727a31a1c3206f7bf2632da57ffc76"} Nov 24 01:29:42 crc kubenswrapper[4755]: I1124 01:29:42.017678 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="da14d080-1117-42b6-b8d3-dfb22ef83267" path="/var/lib/kubelet/pods/da14d080-1117-42b6-b8d3-dfb22ef83267/volumes" Nov 24 01:29:42 crc kubenswrapper[4755]: I1124 01:29:42.018534 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0404892f-7ed1-4990-a8b1-960e6531b017","Type":"ContainerStarted","Data":"99e85fdb926c8d72c97b52ee617061daf9d25305b1096f9ff5a835bd47e8e052"} Nov 24 01:29:42 crc kubenswrapper[4755]: I1124 01:29:42.598519 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Nov 24 01:29:43 crc kubenswrapper[4755]: I1124 01:29:43.025796 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0404892f-7ed1-4990-a8b1-960e6531b017","Type":"ContainerStarted","Data":"db2dad67edb993762cd47f4e513c86dd30285626506b01d14e003c6408ee1c6d"} Nov 24 01:29:43 crc kubenswrapper[4755]: I1124 01:29:43.052663 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.052609421 podStartE2EDuration="4.052609421s" podCreationTimestamp="2025-11-24 01:29:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:29:43.045585037 +0000 UTC m=+1007.731650568" watchObservedRunningTime="2025-11-24 01:29:43.052609421 +0000 UTC m=+1007.738674942" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.041468 4755 generic.go:334] "Generic (PLEG): container finished" podID="fbb6d8d0-981e-4309-a478-1f07ea12d6c7" containerID="bcf0488860390ca50c5aa5b2512ecdca965b8aa8f5e6379eb739b2408cb67c84" exitCode=0 Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.041529 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-55bcc7b6cd-xzjdz" 
event={"ID":"fbb6d8d0-981e-4309-a478-1f07ea12d6c7","Type":"ContainerDied","Data":"bcf0488860390ca50c5aa5b2512ecdca965b8aa8f5e6379eb739b2408cb67c84"} Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.377236 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-55bcc7b6cd-xzjdz" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.508692 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/fbb6d8d0-981e-4309-a478-1f07ea12d6c7-config\") pod \"fbb6d8d0-981e-4309-a478-1f07ea12d6c7\" (UID: \"fbb6d8d0-981e-4309-a478-1f07ea12d6c7\") " Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.508825 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fbb6d8d0-981e-4309-a478-1f07ea12d6c7-ovndb-tls-certs\") pod \"fbb6d8d0-981e-4309-a478-1f07ea12d6c7\" (UID: \"fbb6d8d0-981e-4309-a478-1f07ea12d6c7\") " Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.508863 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbb6d8d0-981e-4309-a478-1f07ea12d6c7-combined-ca-bundle\") pod \"fbb6d8d0-981e-4309-a478-1f07ea12d6c7\" (UID: \"fbb6d8d0-981e-4309-a478-1f07ea12d6c7\") " Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.508913 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qnggt\" (UniqueName: \"kubernetes.io/projected/fbb6d8d0-981e-4309-a478-1f07ea12d6c7-kube-api-access-qnggt\") pod \"fbb6d8d0-981e-4309-a478-1f07ea12d6c7\" (UID: \"fbb6d8d0-981e-4309-a478-1f07ea12d6c7\") " Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.508944 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/fbb6d8d0-981e-4309-a478-1f07ea12d6c7-httpd-config\") pod \"fbb6d8d0-981e-4309-a478-1f07ea12d6c7\" (UID: \"fbb6d8d0-981e-4309-a478-1f07ea12d6c7\") " Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.538992 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbb6d8d0-981e-4309-a478-1f07ea12d6c7-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "fbb6d8d0-981e-4309-a478-1f07ea12d6c7" (UID: "fbb6d8d0-981e-4309-a478-1f07ea12d6c7"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.539033 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fbb6d8d0-981e-4309-a478-1f07ea12d6c7-kube-api-access-qnggt" (OuterVolumeSpecName: "kube-api-access-qnggt") pod "fbb6d8d0-981e-4309-a478-1f07ea12d6c7" (UID: "fbb6d8d0-981e-4309-a478-1f07ea12d6c7"). InnerVolumeSpecName "kube-api-access-qnggt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.569390 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-55d8c9dc95-pj9b5"] Nov 24 01:29:44 crc kubenswrapper[4755]: E1124 01:29:44.570241 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbb6d8d0-981e-4309-a478-1f07ea12d6c7" containerName="neutron-httpd" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.570258 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbb6d8d0-981e-4309-a478-1f07ea12d6c7" containerName="neutron-httpd" Nov 24 01:29:44 crc kubenswrapper[4755]: E1124 01:29:44.570280 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbb6d8d0-981e-4309-a478-1f07ea12d6c7" containerName="neutron-api" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.570298 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbb6d8d0-981e-4309-a478-1f07ea12d6c7" containerName="neutron-api" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.570506 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbb6d8d0-981e-4309-a478-1f07ea12d6c7" containerName="neutron-httpd" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.570522 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbb6d8d0-981e-4309-a478-1f07ea12d6c7" containerName="neutron-api" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.571412 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-55d8c9dc95-pj9b5" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.576568 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.576823 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.577032 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.611581 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbb6d8d0-981e-4309-a478-1f07ea12d6c7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fbb6d8d0-981e-4309-a478-1f07ea12d6c7" (UID: "fbb6d8d0-981e-4309-a478-1f07ea12d6c7"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.628336 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbb6d8d0-981e-4309-a478-1f07ea12d6c7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.628380 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qnggt\" (UniqueName: \"kubernetes.io/projected/fbb6d8d0-981e-4309-a478-1f07ea12d6c7-kube-api-access-qnggt\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.628396 4755 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/fbb6d8d0-981e-4309-a478-1f07ea12d6c7-httpd-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.644060 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbb6d8d0-981e-4309-a478-1f07ea12d6c7-config" (OuterVolumeSpecName: "config") pod "fbb6d8d0-981e-4309-a478-1f07ea12d6c7" (UID: "fbb6d8d0-981e-4309-a478-1f07ea12d6c7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.647849 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-55d8c9dc95-pj9b5"] Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.667547 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbb6d8d0-981e-4309-a478-1f07ea12d6c7-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "fbb6d8d0-981e-4309-a478-1f07ea12d6c7" (UID: "fbb6d8d0-981e-4309-a478-1f07ea12d6c7"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.694671 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.695985 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.699514 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-bp4qj" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.699785 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.699975 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.701863 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.730036 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bpf4q\" (UniqueName: \"kubernetes.io/projected/5e2272ce-0bb7-4cc8-a11b-be4947646efd-kube-api-access-bpf4q\") pod \"swift-proxy-55d8c9dc95-pj9b5\" (UID: \"5e2272ce-0bb7-4cc8-a11b-be4947646efd\") " pod="openstack/swift-proxy-55d8c9dc95-pj9b5" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.730089 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e2272ce-0bb7-4cc8-a11b-be4947646efd-config-data\") pod \"swift-proxy-55d8c9dc95-pj9b5\" (UID: \"5e2272ce-0bb7-4cc8-a11b-be4947646efd\") " pod="openstack/swift-proxy-55d8c9dc95-pj9b5" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.730110 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5e2272ce-0bb7-4cc8-a11b-be4947646efd-run-httpd\") pod \"swift-proxy-55d8c9dc95-pj9b5\" (UID: \"5e2272ce-0bb7-4cc8-a11b-be4947646efd\") " pod="openstack/swift-proxy-55d8c9dc95-pj9b5" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.730128 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5e2272ce-0bb7-4cc8-a11b-be4947646efd-log-httpd\") pod \"swift-proxy-55d8c9dc95-pj9b5\" (UID: \"5e2272ce-0bb7-4cc8-a11b-be4947646efd\") " pod="openstack/swift-proxy-55d8c9dc95-pj9b5" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.730146 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e2272ce-0bb7-4cc8-a11b-be4947646efd-internal-tls-certs\") pod \"swift-proxy-55d8c9dc95-pj9b5\" (UID: \"5e2272ce-0bb7-4cc8-a11b-be4947646efd\") " pod="openstack/swift-proxy-55d8c9dc95-pj9b5" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.730191 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e2272ce-0bb7-4cc8-a11b-be4947646efd-combined-ca-bundle\") pod \"swift-proxy-55d8c9dc95-pj9b5\" (UID: \"5e2272ce-0bb7-4cc8-a11b-be4947646efd\") " pod="openstack/swift-proxy-55d8c9dc95-pj9b5" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.730253 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/5e2272ce-0bb7-4cc8-a11b-be4947646efd-etc-swift\") pod \"swift-proxy-55d8c9dc95-pj9b5\" (UID: \"5e2272ce-0bb7-4cc8-a11b-be4947646efd\") " 
pod="openstack/swift-proxy-55d8c9dc95-pj9b5" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.730284 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e2272ce-0bb7-4cc8-a11b-be4947646efd-public-tls-certs\") pod \"swift-proxy-55d8c9dc95-pj9b5\" (UID: \"5e2272ce-0bb7-4cc8-a11b-be4947646efd\") " pod="openstack/swift-proxy-55d8c9dc95-pj9b5" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.730339 4755 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fbb6d8d0-981e-4309-a478-1f07ea12d6c7-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.730352 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/fbb6d8d0-981e-4309-a478-1f07ea12d6c7-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.831931 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1c6ba259-f60e-4b3f-b901-e42aaff73569-openstack-config\") pod \"openstackclient\" (UID: \"1c6ba259-f60e-4b3f-b901-e42aaff73569\") " pod="openstack/openstackclient" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.832547 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bpf4q\" (UniqueName: \"kubernetes.io/projected/5e2272ce-0bb7-4cc8-a11b-be4947646efd-kube-api-access-bpf4q\") pod \"swift-proxy-55d8c9dc95-pj9b5\" (UID: \"5e2272ce-0bb7-4cc8-a11b-be4947646efd\") " pod="openstack/swift-proxy-55d8c9dc95-pj9b5" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.833007 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e2272ce-0bb7-4cc8-a11b-be4947646efd-config-data\") pod \"swift-proxy-55d8c9dc95-pj9b5\" (UID: \"5e2272ce-0bb7-4cc8-a11b-be4947646efd\") " pod="openstack/swift-proxy-55d8c9dc95-pj9b5" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.833047 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5e2272ce-0bb7-4cc8-a11b-be4947646efd-run-httpd\") pod \"swift-proxy-55d8c9dc95-pj9b5\" (UID: \"5e2272ce-0bb7-4cc8-a11b-be4947646efd\") " pod="openstack/swift-proxy-55d8c9dc95-pj9b5" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.833074 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5e2272ce-0bb7-4cc8-a11b-be4947646efd-log-httpd\") pod \"swift-proxy-55d8c9dc95-pj9b5\" (UID: \"5e2272ce-0bb7-4cc8-a11b-be4947646efd\") " pod="openstack/swift-proxy-55d8c9dc95-pj9b5" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.833097 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e2272ce-0bb7-4cc8-a11b-be4947646efd-internal-tls-certs\") pod \"swift-proxy-55d8c9dc95-pj9b5\" (UID: \"5e2272ce-0bb7-4cc8-a11b-be4947646efd\") " pod="openstack/swift-proxy-55d8c9dc95-pj9b5" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.833148 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: 
\"kubernetes.io/secret/1c6ba259-f60e-4b3f-b901-e42aaff73569-openstack-config-secret\") pod \"openstackclient\" (UID: \"1c6ba259-f60e-4b3f-b901-e42aaff73569\") " pod="openstack/openstackclient" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.833219 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e2272ce-0bb7-4cc8-a11b-be4947646efd-combined-ca-bundle\") pod \"swift-proxy-55d8c9dc95-pj9b5\" (UID: \"5e2272ce-0bb7-4cc8-a11b-be4947646efd\") " pod="openstack/swift-proxy-55d8c9dc95-pj9b5" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.833331 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6fdpc\" (UniqueName: \"kubernetes.io/projected/1c6ba259-f60e-4b3f-b901-e42aaff73569-kube-api-access-6fdpc\") pod \"openstackclient\" (UID: \"1c6ba259-f60e-4b3f-b901-e42aaff73569\") " pod="openstack/openstackclient" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.833398 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/5e2272ce-0bb7-4cc8-a11b-be4947646efd-etc-swift\") pod \"swift-proxy-55d8c9dc95-pj9b5\" (UID: \"5e2272ce-0bb7-4cc8-a11b-be4947646efd\") " pod="openstack/swift-proxy-55d8c9dc95-pj9b5" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.833474 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e2272ce-0bb7-4cc8-a11b-be4947646efd-public-tls-certs\") pod \"swift-proxy-55d8c9dc95-pj9b5\" (UID: \"5e2272ce-0bb7-4cc8-a11b-be4947646efd\") " pod="openstack/swift-proxy-55d8c9dc95-pj9b5" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.833535 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c6ba259-f60e-4b3f-b901-e42aaff73569-combined-ca-bundle\") pod \"openstackclient\" (UID: \"1c6ba259-f60e-4b3f-b901-e42aaff73569\") " pod="openstack/openstackclient" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.834241 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5e2272ce-0bb7-4cc8-a11b-be4947646efd-run-httpd\") pod \"swift-proxy-55d8c9dc95-pj9b5\" (UID: \"5e2272ce-0bb7-4cc8-a11b-be4947646efd\") " pod="openstack/swift-proxy-55d8c9dc95-pj9b5" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.834311 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5e2272ce-0bb7-4cc8-a11b-be4947646efd-log-httpd\") pod \"swift-proxy-55d8c9dc95-pj9b5\" (UID: \"5e2272ce-0bb7-4cc8-a11b-be4947646efd\") " pod="openstack/swift-proxy-55d8c9dc95-pj9b5" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.837378 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/5e2272ce-0bb7-4cc8-a11b-be4947646efd-etc-swift\") pod \"swift-proxy-55d8c9dc95-pj9b5\" (UID: \"5e2272ce-0bb7-4cc8-a11b-be4947646efd\") " pod="openstack/swift-proxy-55d8c9dc95-pj9b5" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.840357 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e2272ce-0bb7-4cc8-a11b-be4947646efd-config-data\") pod \"swift-proxy-55d8c9dc95-pj9b5\" (UID: 
\"5e2272ce-0bb7-4cc8-a11b-be4947646efd\") " pod="openstack/swift-proxy-55d8c9dc95-pj9b5" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.841030 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e2272ce-0bb7-4cc8-a11b-be4947646efd-public-tls-certs\") pod \"swift-proxy-55d8c9dc95-pj9b5\" (UID: \"5e2272ce-0bb7-4cc8-a11b-be4947646efd\") " pod="openstack/swift-proxy-55d8c9dc95-pj9b5" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.842771 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e2272ce-0bb7-4cc8-a11b-be4947646efd-combined-ca-bundle\") pod \"swift-proxy-55d8c9dc95-pj9b5\" (UID: \"5e2272ce-0bb7-4cc8-a11b-be4947646efd\") " pod="openstack/swift-proxy-55d8c9dc95-pj9b5" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.846738 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e2272ce-0bb7-4cc8-a11b-be4947646efd-internal-tls-certs\") pod \"swift-proxy-55d8c9dc95-pj9b5\" (UID: \"5e2272ce-0bb7-4cc8-a11b-be4947646efd\") " pod="openstack/swift-proxy-55d8c9dc95-pj9b5" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.849521 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bpf4q\" (UniqueName: \"kubernetes.io/projected/5e2272ce-0bb7-4cc8-a11b-be4947646efd-kube-api-access-bpf4q\") pod \"swift-proxy-55d8c9dc95-pj9b5\" (UID: \"5e2272ce-0bb7-4cc8-a11b-be4947646efd\") " pod="openstack/swift-proxy-55d8c9dc95-pj9b5" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.935506 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1c6ba259-f60e-4b3f-b901-e42aaff73569-openstack-config-secret\") pod \"openstackclient\" (UID: \"1c6ba259-f60e-4b3f-b901-e42aaff73569\") " pod="openstack/openstackclient" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.935654 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6fdpc\" (UniqueName: \"kubernetes.io/projected/1c6ba259-f60e-4b3f-b901-e42aaff73569-kube-api-access-6fdpc\") pod \"openstackclient\" (UID: \"1c6ba259-f60e-4b3f-b901-e42aaff73569\") " pod="openstack/openstackclient" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.935734 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c6ba259-f60e-4b3f-b901-e42aaff73569-combined-ca-bundle\") pod \"openstackclient\" (UID: \"1c6ba259-f60e-4b3f-b901-e42aaff73569\") " pod="openstack/openstackclient" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.935771 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1c6ba259-f60e-4b3f-b901-e42aaff73569-openstack-config\") pod \"openstackclient\" (UID: \"1c6ba259-f60e-4b3f-b901-e42aaff73569\") " pod="openstack/openstackclient" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.936981 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1c6ba259-f60e-4b3f-b901-e42aaff73569-openstack-config\") pod \"openstackclient\" (UID: \"1c6ba259-f60e-4b3f-b901-e42aaff73569\") " pod="openstack/openstackclient" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.939764 4755 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1c6ba259-f60e-4b3f-b901-e42aaff73569-openstack-config-secret\") pod \"openstackclient\" (UID: \"1c6ba259-f60e-4b3f-b901-e42aaff73569\") " pod="openstack/openstackclient" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.944376 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c6ba259-f60e-4b3f-b901-e42aaff73569-combined-ca-bundle\") pod \"openstackclient\" (UID: \"1c6ba259-f60e-4b3f-b901-e42aaff73569\") " pod="openstack/openstackclient" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.953118 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6fdpc\" (UniqueName: \"kubernetes.io/projected/1c6ba259-f60e-4b3f-b901-e42aaff73569-kube-api-access-6fdpc\") pod \"openstackclient\" (UID: \"1c6ba259-f60e-4b3f-b901-e42aaff73569\") " pod="openstack/openstackclient" Nov 24 01:29:44 crc kubenswrapper[4755]: I1124 01:29:44.957752 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-55d8c9dc95-pj9b5" Nov 24 01:29:45 crc kubenswrapper[4755]: I1124 01:29:45.018232 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Nov 24 01:29:45 crc kubenswrapper[4755]: I1124 01:29:45.076439 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-55bcc7b6cd-xzjdz" event={"ID":"fbb6d8d0-981e-4309-a478-1f07ea12d6c7","Type":"ContainerDied","Data":"16fac4ee25e32ce5a804add26328e2bbae92e23afd23196bca5e2ec4c1cfaca5"} Nov 24 01:29:45 crc kubenswrapper[4755]: I1124 01:29:45.076833 4755 scope.go:117] "RemoveContainer" containerID="bc1a0c840395721f8fa147c4be20748da99022eabfad9c5a5875326fc32d6e21" Nov 24 01:29:45 crc kubenswrapper[4755]: I1124 01:29:45.076752 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-55bcc7b6cd-xzjdz" Nov 24 01:29:45 crc kubenswrapper[4755]: I1124 01:29:45.184462 4755 scope.go:117] "RemoveContainer" containerID="bcf0488860390ca50c5aa5b2512ecdca965b8aa8f5e6379eb739b2408cb67c84" Nov 24 01:29:45 crc kubenswrapper[4755]: I1124 01:29:45.191741 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-55bcc7b6cd-xzjdz"] Nov 24 01:29:45 crc kubenswrapper[4755]: I1124 01:29:45.199076 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-55bcc7b6cd-xzjdz"] Nov 24 01:29:45 crc kubenswrapper[4755]: I1124 01:29:45.344313 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Nov 24 01:29:45 crc kubenswrapper[4755]: I1124 01:29:45.520350 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-55d8c9dc95-pj9b5"] Nov 24 01:29:45 crc kubenswrapper[4755]: W1124 01:29:45.543296 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1c6ba259_f60e_4b3f_b901_e42aaff73569.slice/crio-c32392ff6d0d60f81ae04cc29fb8926222f63b5c517189bd10bb07dbe68e1cad WatchSource:0}: Error finding container c32392ff6d0d60f81ae04cc29fb8926222f63b5c517189bd10bb07dbe68e1cad: Status 404 returned error can't find the container with id c32392ff6d0d60f81ae04cc29fb8926222f63b5c517189bd10bb07dbe68e1cad Nov 24 01:29:45 crc kubenswrapper[4755]: I1124 01:29:45.546445 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Nov 24 01:29:46 crc kubenswrapper[4755]: I1124 01:29:46.006620 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fbb6d8d0-981e-4309-a478-1f07ea12d6c7" path="/var/lib/kubelet/pods/fbb6d8d0-981e-4309-a478-1f07ea12d6c7/volumes" Nov 24 01:29:46 crc kubenswrapper[4755]: I1124 01:29:46.088320 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"1c6ba259-f60e-4b3f-b901-e42aaff73569","Type":"ContainerStarted","Data":"c32392ff6d0d60f81ae04cc29fb8926222f63b5c517189bd10bb07dbe68e1cad"} Nov 24 01:29:46 crc kubenswrapper[4755]: I1124 01:29:46.091255 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-55d8c9dc95-pj9b5" event={"ID":"5e2272ce-0bb7-4cc8-a11b-be4947646efd","Type":"ContainerStarted","Data":"516f18600b086d6c7705e782b185abeeed69689a6d42f3cd2e97a366c68613ee"} Nov 24 01:29:46 crc kubenswrapper[4755]: I1124 01:29:46.091299 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-55d8c9dc95-pj9b5" event={"ID":"5e2272ce-0bb7-4cc8-a11b-be4947646efd","Type":"ContainerStarted","Data":"110443646da10462dac7bbdcc2b10f3bfe532ffc6801b96d5c7eea64b38fcf07"} Nov 24 01:29:46 crc kubenswrapper[4755]: I1124 01:29:46.091314 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-55d8c9dc95-pj9b5" event={"ID":"5e2272ce-0bb7-4cc8-a11b-be4947646efd","Type":"ContainerStarted","Data":"7c25cd97e6634844fdd4922c7b936b87476d3886c265c5fa0ad0c1979cf3225b"} Nov 24 01:29:46 crc kubenswrapper[4755]: I1124 01:29:46.091389 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-55d8c9dc95-pj9b5" Nov 24 01:29:46 crc kubenswrapper[4755]: I1124 01:29:46.091578 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-55d8c9dc95-pj9b5" Nov 24 01:29:46 crc kubenswrapper[4755]: I1124 01:29:46.124894 4755 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="openstack/swift-proxy-55d8c9dc95-pj9b5" podStartSLOduration=2.124863959 podStartE2EDuration="2.124863959s" podCreationTimestamp="2025-11-24 01:29:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:29:46.120325924 +0000 UTC m=+1010.806391425" watchObservedRunningTime="2025-11-24 01:29:46.124863959 +0000 UTC m=+1010.810929450" Nov 24 01:29:48 crc kubenswrapper[4755]: I1124 01:29:48.258501 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-55cf755d8-2cns2" podUID="dc7447cb-d4c3-48d1-8cd3-065d5eee21cb" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.148:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.148:8443: connect: connection refused" Nov 24 01:29:50 crc kubenswrapper[4755]: I1124 01:29:50.615593 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Nov 24 01:29:52 crc kubenswrapper[4755]: I1124 01:29:52.142703 4755 generic.go:334] "Generic (PLEG): container finished" podID="a7e96f37-574f-4900-88f9-33dc41179807" containerID="e62d17590102a4335196ac2c1b11789315c835ac571282ec3ef7c06495a87c30" exitCode=137 Nov 24 01:29:52 crc kubenswrapper[4755]: I1124 01:29:52.142784 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7e96f37-574f-4900-88f9-33dc41179807","Type":"ContainerDied","Data":"e62d17590102a4335196ac2c1b11789315c835ac571282ec3ef7c06495a87c30"} Nov 24 01:29:54 crc kubenswrapper[4755]: I1124 01:29:54.734405 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-5vmsg"] Nov 24 01:29:54 crc kubenswrapper[4755]: I1124 01:29:54.736012 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-5vmsg" Nov 24 01:29:54 crc kubenswrapper[4755]: I1124 01:29:54.738194 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tmwxh\" (UniqueName: \"kubernetes.io/projected/fd35021d-eaba-4748-b7b5-371fac220837-kube-api-access-tmwxh\") pod \"nova-api-db-create-5vmsg\" (UID: \"fd35021d-eaba-4748-b7b5-371fac220837\") " pod="openstack/nova-api-db-create-5vmsg" Nov 24 01:29:54 crc kubenswrapper[4755]: I1124 01:29:54.738286 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fd35021d-eaba-4748-b7b5-371fac220837-operator-scripts\") pod \"nova-api-db-create-5vmsg\" (UID: \"fd35021d-eaba-4748-b7b5-371fac220837\") " pod="openstack/nova-api-db-create-5vmsg" Nov 24 01:29:54 crc kubenswrapper[4755]: I1124 01:29:54.752704 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-5vmsg"] Nov 24 01:29:54 crc kubenswrapper[4755]: I1124 01:29:54.844756 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-2brc6"] Nov 24 01:29:54 crc kubenswrapper[4755]: I1124 01:29:54.845457 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fd35021d-eaba-4748-b7b5-371fac220837-operator-scripts\") pod \"nova-api-db-create-5vmsg\" (UID: \"fd35021d-eaba-4748-b7b5-371fac220837\") " pod="openstack/nova-api-db-create-5vmsg" Nov 24 01:29:54 crc kubenswrapper[4755]: I1124 01:29:54.845628 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tmwxh\" (UniqueName: \"kubernetes.io/projected/fd35021d-eaba-4748-b7b5-371fac220837-kube-api-access-tmwxh\") pod \"nova-api-db-create-5vmsg\" (UID: \"fd35021d-eaba-4748-b7b5-371fac220837\") " pod="openstack/nova-api-db-create-5vmsg" Nov 24 01:29:54 crc kubenswrapper[4755]: I1124 01:29:54.845883 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-2brc6" Nov 24 01:29:54 crc kubenswrapper[4755]: I1124 01:29:54.847077 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fd35021d-eaba-4748-b7b5-371fac220837-operator-scripts\") pod \"nova-api-db-create-5vmsg\" (UID: \"fd35021d-eaba-4748-b7b5-371fac220837\") " pod="openstack/nova-api-db-create-5vmsg" Nov 24 01:29:54 crc kubenswrapper[4755]: I1124 01:29:54.853459 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-497f-account-create-vxldj"] Nov 24 01:29:54 crc kubenswrapper[4755]: I1124 01:29:54.869448 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-2brc6"] Nov 24 01:29:54 crc kubenswrapper[4755]: I1124 01:29:54.869535 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-497f-account-create-vxldj" Nov 24 01:29:54 crc kubenswrapper[4755]: I1124 01:29:54.871357 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Nov 24 01:29:54 crc kubenswrapper[4755]: I1124 01:29:54.873536 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-497f-account-create-vxldj"] Nov 24 01:29:54 crc kubenswrapper[4755]: I1124 01:29:54.879824 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tmwxh\" (UniqueName: \"kubernetes.io/projected/fd35021d-eaba-4748-b7b5-371fac220837-kube-api-access-tmwxh\") pod \"nova-api-db-create-5vmsg\" (UID: \"fd35021d-eaba-4748-b7b5-371fac220837\") " pod="openstack/nova-api-db-create-5vmsg" Nov 24 01:29:54 crc kubenswrapper[4755]: I1124 01:29:54.948142 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ddadc31c-aad5-4896-84da-4fbe82710d53-operator-scripts\") pod \"nova-cell0-db-create-2brc6\" (UID: \"ddadc31c-aad5-4896-84da-4fbe82710d53\") " pod="openstack/nova-cell0-db-create-2brc6" Nov 24 01:29:54 crc kubenswrapper[4755]: I1124 01:29:54.948209 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vr5rh\" (UniqueName: \"kubernetes.io/projected/fdf82262-5b8f-4ee2-88b6-494e6f4d5b58-kube-api-access-vr5rh\") pod \"nova-api-497f-account-create-vxldj\" (UID: \"fdf82262-5b8f-4ee2-88b6-494e6f4d5b58\") " pod="openstack/nova-api-497f-account-create-vxldj" Nov 24 01:29:54 crc kubenswrapper[4755]: I1124 01:29:54.948229 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fdf82262-5b8f-4ee2-88b6-494e6f4d5b58-operator-scripts\") pod \"nova-api-497f-account-create-vxldj\" (UID: \"fdf82262-5b8f-4ee2-88b6-494e6f4d5b58\") " pod="openstack/nova-api-497f-account-create-vxldj" Nov 24 01:29:54 crc kubenswrapper[4755]: I1124 01:29:54.948247 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tztv2\" (UniqueName: \"kubernetes.io/projected/ddadc31c-aad5-4896-84da-4fbe82710d53-kube-api-access-tztv2\") pod \"nova-cell0-db-create-2brc6\" (UID: \"ddadc31c-aad5-4896-84da-4fbe82710d53\") " pod="openstack/nova-cell0-db-create-2brc6" Nov 24 01:29:54 crc kubenswrapper[4755]: I1124 01:29:54.953239 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-frffp"] Nov 24 01:29:54 crc kubenswrapper[4755]: I1124 01:29:54.954446 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-frffp" Nov 24 01:29:54 crc kubenswrapper[4755]: I1124 01:29:54.960841 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-frffp"] Nov 24 01:29:54 crc kubenswrapper[4755]: I1124 01:29:54.966089 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-55d8c9dc95-pj9b5" Nov 24 01:29:54 crc kubenswrapper[4755]: I1124 01:29:54.967260 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-55d8c9dc95-pj9b5" Nov 24 01:29:55 crc kubenswrapper[4755]: I1124 01:29:55.046777 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-b37d-account-create-72hxc"] Nov 24 01:29:55 crc kubenswrapper[4755]: I1124 01:29:55.047836 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-b37d-account-create-72hxc" Nov 24 01:29:55 crc kubenswrapper[4755]: I1124 01:29:55.049963 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vr5rh\" (UniqueName: \"kubernetes.io/projected/fdf82262-5b8f-4ee2-88b6-494e6f4d5b58-kube-api-access-vr5rh\") pod \"nova-api-497f-account-create-vxldj\" (UID: \"fdf82262-5b8f-4ee2-88b6-494e6f4d5b58\") " pod="openstack/nova-api-497f-account-create-vxldj" Nov 24 01:29:55 crc kubenswrapper[4755]: I1124 01:29:55.050715 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fdf82262-5b8f-4ee2-88b6-494e6f4d5b58-operator-scripts\") pod \"nova-api-497f-account-create-vxldj\" (UID: \"fdf82262-5b8f-4ee2-88b6-494e6f4d5b58\") " pod="openstack/nova-api-497f-account-create-vxldj" Nov 24 01:29:55 crc kubenswrapper[4755]: I1124 01:29:55.050782 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tztv2\" (UniqueName: \"kubernetes.io/projected/ddadc31c-aad5-4896-84da-4fbe82710d53-kube-api-access-tztv2\") pod \"nova-cell0-db-create-2brc6\" (UID: \"ddadc31c-aad5-4896-84da-4fbe82710d53\") " pod="openstack/nova-cell0-db-create-2brc6" Nov 24 01:29:55 crc kubenswrapper[4755]: I1124 01:29:55.051088 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ddadc31c-aad5-4896-84da-4fbe82710d53-operator-scripts\") pod \"nova-cell0-db-create-2brc6\" (UID: \"ddadc31c-aad5-4896-84da-4fbe82710d53\") " pod="openstack/nova-cell0-db-create-2brc6" Nov 24 01:29:55 crc kubenswrapper[4755]: I1124 01:29:55.053247 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ddadc31c-aad5-4896-84da-4fbe82710d53-operator-scripts\") pod \"nova-cell0-db-create-2brc6\" (UID: \"ddadc31c-aad5-4896-84da-4fbe82710d53\") " pod="openstack/nova-cell0-db-create-2brc6" Nov 24 01:29:55 crc kubenswrapper[4755]: I1124 01:29:55.053297 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fdf82262-5b8f-4ee2-88b6-494e6f4d5b58-operator-scripts\") pod \"nova-api-497f-account-create-vxldj\" (UID: \"fdf82262-5b8f-4ee2-88b6-494e6f4d5b58\") " pod="openstack/nova-api-497f-account-create-vxldj" Nov 24 01:29:55 crc kubenswrapper[4755]: I1124 01:29:55.053583 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-5vmsg" Nov 24 01:29:55 crc kubenswrapper[4755]: I1124 01:29:55.080063 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Nov 24 01:29:55 crc kubenswrapper[4755]: I1124 01:29:55.092227 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-b37d-account-create-72hxc"] Nov 24 01:29:55 crc kubenswrapper[4755]: I1124 01:29:55.100780 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vr5rh\" (UniqueName: \"kubernetes.io/projected/fdf82262-5b8f-4ee2-88b6-494e6f4d5b58-kube-api-access-vr5rh\") pod \"nova-api-497f-account-create-vxldj\" (UID: \"fdf82262-5b8f-4ee2-88b6-494e6f4d5b58\") " pod="openstack/nova-api-497f-account-create-vxldj" Nov 24 01:29:55 crc kubenswrapper[4755]: I1124 01:29:55.121382 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tztv2\" (UniqueName: \"kubernetes.io/projected/ddadc31c-aad5-4896-84da-4fbe82710d53-kube-api-access-tztv2\") pod \"nova-cell0-db-create-2brc6\" (UID: \"ddadc31c-aad5-4896-84da-4fbe82710d53\") " pod="openstack/nova-cell0-db-create-2brc6" Nov 24 01:29:55 crc kubenswrapper[4755]: I1124 01:29:55.152367 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/87ee57db-6666-4ced-b558-accfa958ce55-operator-scripts\") pod \"nova-cell1-db-create-frffp\" (UID: \"87ee57db-6666-4ced-b558-accfa958ce55\") " pod="openstack/nova-cell1-db-create-frffp" Nov 24 01:29:55 crc kubenswrapper[4755]: I1124 01:29:55.152466 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zgn2w\" (UniqueName: \"kubernetes.io/projected/87ee57db-6666-4ced-b558-accfa958ce55-kube-api-access-zgn2w\") pod \"nova-cell1-db-create-frffp\" (UID: \"87ee57db-6666-4ced-b558-accfa958ce55\") " pod="openstack/nova-cell1-db-create-frffp" Nov 24 01:29:55 crc kubenswrapper[4755]: I1124 01:29:55.152526 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hvjhj\" (UniqueName: \"kubernetes.io/projected/e60b8f75-376b-4ba1-9b41-5f334cec157f-kube-api-access-hvjhj\") pod \"nova-cell0-b37d-account-create-72hxc\" (UID: \"e60b8f75-376b-4ba1-9b41-5f334cec157f\") " pod="openstack/nova-cell0-b37d-account-create-72hxc" Nov 24 01:29:55 crc kubenswrapper[4755]: I1124 01:29:55.152605 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e60b8f75-376b-4ba1-9b41-5f334cec157f-operator-scripts\") pod \"nova-cell0-b37d-account-create-72hxc\" (UID: \"e60b8f75-376b-4ba1-9b41-5f334cec157f\") " pod="openstack/nova-cell0-b37d-account-create-72hxc" Nov 24 01:29:55 crc kubenswrapper[4755]: I1124 01:29:55.162512 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-2brc6" Nov 24 01:29:55 crc kubenswrapper[4755]: I1124 01:29:55.239870 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-497f-account-create-vxldj" Nov 24 01:29:55 crc kubenswrapper[4755]: I1124 01:29:55.249139 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-d94f-account-create-jhmwf"] Nov 24 01:29:55 crc kubenswrapper[4755]: I1124 01:29:55.250671 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-d94f-account-create-jhmwf" Nov 24 01:29:55 crc kubenswrapper[4755]: I1124 01:29:55.254086 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hvjhj\" (UniqueName: \"kubernetes.io/projected/e60b8f75-376b-4ba1-9b41-5f334cec157f-kube-api-access-hvjhj\") pod \"nova-cell0-b37d-account-create-72hxc\" (UID: \"e60b8f75-376b-4ba1-9b41-5f334cec157f\") " pod="openstack/nova-cell0-b37d-account-create-72hxc" Nov 24 01:29:55 crc kubenswrapper[4755]: I1124 01:29:55.254160 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e60b8f75-376b-4ba1-9b41-5f334cec157f-operator-scripts\") pod \"nova-cell0-b37d-account-create-72hxc\" (UID: \"e60b8f75-376b-4ba1-9b41-5f334cec157f\") " pod="openstack/nova-cell0-b37d-account-create-72hxc" Nov 24 01:29:55 crc kubenswrapper[4755]: I1124 01:29:55.254264 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/87ee57db-6666-4ced-b558-accfa958ce55-operator-scripts\") pod \"nova-cell1-db-create-frffp\" (UID: \"87ee57db-6666-4ced-b558-accfa958ce55\") " pod="openstack/nova-cell1-db-create-frffp" Nov 24 01:29:55 crc kubenswrapper[4755]: I1124 01:29:55.254327 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zgn2w\" (UniqueName: \"kubernetes.io/projected/87ee57db-6666-4ced-b558-accfa958ce55-kube-api-access-zgn2w\") pod \"nova-cell1-db-create-frffp\" (UID: \"87ee57db-6666-4ced-b558-accfa958ce55\") " pod="openstack/nova-cell1-db-create-frffp" Nov 24 01:29:55 crc kubenswrapper[4755]: I1124 01:29:55.255339 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e60b8f75-376b-4ba1-9b41-5f334cec157f-operator-scripts\") pod \"nova-cell0-b37d-account-create-72hxc\" (UID: \"e60b8f75-376b-4ba1-9b41-5f334cec157f\") " pod="openstack/nova-cell0-b37d-account-create-72hxc" Nov 24 01:29:55 crc kubenswrapper[4755]: I1124 01:29:55.255448 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/87ee57db-6666-4ced-b558-accfa958ce55-operator-scripts\") pod \"nova-cell1-db-create-frffp\" (UID: \"87ee57db-6666-4ced-b558-accfa958ce55\") " pod="openstack/nova-cell1-db-create-frffp" Nov 24 01:29:55 crc kubenswrapper[4755]: I1124 01:29:55.257194 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Nov 24 01:29:55 crc kubenswrapper[4755]: I1124 01:29:55.262925 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-d94f-account-create-jhmwf"] Nov 24 01:29:55 crc kubenswrapper[4755]: I1124 01:29:55.276524 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hvjhj\" (UniqueName: \"kubernetes.io/projected/e60b8f75-376b-4ba1-9b41-5f334cec157f-kube-api-access-hvjhj\") pod \"nova-cell0-b37d-account-create-72hxc\" (UID: \"e60b8f75-376b-4ba1-9b41-5f334cec157f\") " pod="openstack/nova-cell0-b37d-account-create-72hxc" Nov 24 01:29:55 crc kubenswrapper[4755]: I1124 01:29:55.284069 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zgn2w\" (UniqueName: \"kubernetes.io/projected/87ee57db-6666-4ced-b558-accfa958ce55-kube-api-access-zgn2w\") pod \"nova-cell1-db-create-frffp\" (UID: 
\"87ee57db-6666-4ced-b558-accfa958ce55\") " pod="openstack/nova-cell1-db-create-frffp" Nov 24 01:29:55 crc kubenswrapper[4755]: I1124 01:29:55.355949 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bc52419e-19f8-4cd9-a1ed-7286e096d6cf-operator-scripts\") pod \"nova-cell1-d94f-account-create-jhmwf\" (UID: \"bc52419e-19f8-4cd9-a1ed-7286e096d6cf\") " pod="openstack/nova-cell1-d94f-account-create-jhmwf" Nov 24 01:29:55 crc kubenswrapper[4755]: I1124 01:29:55.356668 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h6n7q\" (UniqueName: \"kubernetes.io/projected/bc52419e-19f8-4cd9-a1ed-7286e096d6cf-kube-api-access-h6n7q\") pod \"nova-cell1-d94f-account-create-jhmwf\" (UID: \"bc52419e-19f8-4cd9-a1ed-7286e096d6cf\") " pod="openstack/nova-cell1-d94f-account-create-jhmwf" Nov 24 01:29:55 crc kubenswrapper[4755]: I1124 01:29:55.366724 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-b37d-account-create-72hxc" Nov 24 01:29:55 crc kubenswrapper[4755]: I1124 01:29:55.458392 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h6n7q\" (UniqueName: \"kubernetes.io/projected/bc52419e-19f8-4cd9-a1ed-7286e096d6cf-kube-api-access-h6n7q\") pod \"nova-cell1-d94f-account-create-jhmwf\" (UID: \"bc52419e-19f8-4cd9-a1ed-7286e096d6cf\") " pod="openstack/nova-cell1-d94f-account-create-jhmwf" Nov 24 01:29:55 crc kubenswrapper[4755]: I1124 01:29:55.458784 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bc52419e-19f8-4cd9-a1ed-7286e096d6cf-operator-scripts\") pod \"nova-cell1-d94f-account-create-jhmwf\" (UID: \"bc52419e-19f8-4cd9-a1ed-7286e096d6cf\") " pod="openstack/nova-cell1-d94f-account-create-jhmwf" Nov 24 01:29:55 crc kubenswrapper[4755]: I1124 01:29:55.459445 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bc52419e-19f8-4cd9-a1ed-7286e096d6cf-operator-scripts\") pod \"nova-cell1-d94f-account-create-jhmwf\" (UID: \"bc52419e-19f8-4cd9-a1ed-7286e096d6cf\") " pod="openstack/nova-cell1-d94f-account-create-jhmwf" Nov 24 01:29:55 crc kubenswrapper[4755]: I1124 01:29:55.476206 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h6n7q\" (UniqueName: \"kubernetes.io/projected/bc52419e-19f8-4cd9-a1ed-7286e096d6cf-kube-api-access-h6n7q\") pod \"nova-cell1-d94f-account-create-jhmwf\" (UID: \"bc52419e-19f8-4cd9-a1ed-7286e096d6cf\") " pod="openstack/nova-cell1-d94f-account-create-jhmwf" Nov 24 01:29:55 crc kubenswrapper[4755]: I1124 01:29:55.577360 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-d94f-account-create-jhmwf" Nov 24 01:29:55 crc kubenswrapper[4755]: I1124 01:29:55.582460 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-frffp" Nov 24 01:29:55 crc kubenswrapper[4755]: I1124 01:29:55.646336 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 24 01:29:55 crc kubenswrapper[4755]: I1124 01:29:55.646753 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="0d5b1659-28dc-49a7-9b79-e04b9e30f0f6" containerName="glance-httpd" containerID="cri-o://3437f900209c8262b3253caf386661d184e82f24bd4db4c41fb76179ac5b147a" gracePeriod=30 Nov 24 01:29:55 crc kubenswrapper[4755]: I1124 01:29:55.651746 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="0d5b1659-28dc-49a7-9b79-e04b9e30f0f6" containerName="glance-log" containerID="cri-o://77b5cc9d31c3d1621de26fb42112358d0e9e5730c5093b86ffb25f07b3fc67f6" gracePeriod=30 Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.166337 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.183739 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g2srq\" (UniqueName: \"kubernetes.io/projected/a7e96f37-574f-4900-88f9-33dc41179807-kube-api-access-g2srq\") pod \"a7e96f37-574f-4900-88f9-33dc41179807\" (UID: \"a7e96f37-574f-4900-88f9-33dc41179807\") " Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.183785 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7e96f37-574f-4900-88f9-33dc41179807-log-httpd\") pod \"a7e96f37-574f-4900-88f9-33dc41179807\" (UID: \"a7e96f37-574f-4900-88f9-33dc41179807\") " Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.183804 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7e96f37-574f-4900-88f9-33dc41179807-config-data\") pod \"a7e96f37-574f-4900-88f9-33dc41179807\" (UID: \"a7e96f37-574f-4900-88f9-33dc41179807\") " Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.183862 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7e96f37-574f-4900-88f9-33dc41179807-run-httpd\") pod \"a7e96f37-574f-4900-88f9-33dc41179807\" (UID: \"a7e96f37-574f-4900-88f9-33dc41179807\") " Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.183910 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7e96f37-574f-4900-88f9-33dc41179807-scripts\") pod \"a7e96f37-574f-4900-88f9-33dc41179807\" (UID: \"a7e96f37-574f-4900-88f9-33dc41179807\") " Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.183938 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a7e96f37-574f-4900-88f9-33dc41179807-sg-core-conf-yaml\") pod \"a7e96f37-574f-4900-88f9-33dc41179807\" (UID: \"a7e96f37-574f-4900-88f9-33dc41179807\") " Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.183961 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7e96f37-574f-4900-88f9-33dc41179807-combined-ca-bundle\") pod \"a7e96f37-574f-4900-88f9-33dc41179807\" (UID: 
\"a7e96f37-574f-4900-88f9-33dc41179807\") " Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.187581 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a7e96f37-574f-4900-88f9-33dc41179807-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a7e96f37-574f-4900-88f9-33dc41179807" (UID: "a7e96f37-574f-4900-88f9-33dc41179807"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.188814 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a7e96f37-574f-4900-88f9-33dc41179807-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a7e96f37-574f-4900-88f9-33dc41179807" (UID: "a7e96f37-574f-4900-88f9-33dc41179807"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.194855 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7e96f37-574f-4900-88f9-33dc41179807-kube-api-access-g2srq" (OuterVolumeSpecName: "kube-api-access-g2srq") pod "a7e96f37-574f-4900-88f9-33dc41179807" (UID: "a7e96f37-574f-4900-88f9-33dc41179807"). InnerVolumeSpecName "kube-api-access-g2srq". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.209196 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7e96f37-574f-4900-88f9-33dc41179807-scripts" (OuterVolumeSpecName: "scripts") pod "a7e96f37-574f-4900-88f9-33dc41179807" (UID: "a7e96f37-574f-4900-88f9-33dc41179807"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.236693 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7e96f37-574f-4900-88f9-33dc41179807","Type":"ContainerDied","Data":"ebbf79190f0a169d1c0adcac9cd978533892aa967b298eecac8c4f32ee983f71"} Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.236746 4755 scope.go:117] "RemoveContainer" containerID="e62d17590102a4335196ac2c1b11789315c835ac571282ec3ef7c06495a87c30" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.236853 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.246904 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7e96f37-574f-4900-88f9-33dc41179807-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a7e96f37-574f-4900-88f9-33dc41179807" (UID: "a7e96f37-574f-4900-88f9-33dc41179807"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.247342 4755 generic.go:334] "Generic (PLEG): container finished" podID="0d5b1659-28dc-49a7-9b79-e04b9e30f0f6" containerID="77b5cc9d31c3d1621de26fb42112358d0e9e5730c5093b86ffb25f07b3fc67f6" exitCode=143 Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.247377 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0d5b1659-28dc-49a7-9b79-e04b9e30f0f6","Type":"ContainerDied","Data":"77b5cc9d31c3d1621de26fb42112358d0e9e5730c5093b86ffb25f07b3fc67f6"} Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.288537 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7e96f37-574f-4900-88f9-33dc41179807-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.288570 4755 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a7e96f37-574f-4900-88f9-33dc41179807-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.288585 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g2srq\" (UniqueName: \"kubernetes.io/projected/a7e96f37-574f-4900-88f9-33dc41179807-kube-api-access-g2srq\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.288603 4755 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7e96f37-574f-4900-88f9-33dc41179807-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.288628 4755 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7e96f37-574f-4900-88f9-33dc41179807-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.337487 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7e96f37-574f-4900-88f9-33dc41179807-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a7e96f37-574f-4900-88f9-33dc41179807" (UID: "a7e96f37-574f-4900-88f9-33dc41179807"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.359182 4755 scope.go:117] "RemoveContainer" containerID="32ef1e5e5d2a7788aad23e607b6e88ccd77e35194ff72e7c65a23a13a8460145" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.367745 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7e96f37-574f-4900-88f9-33dc41179807-config-data" (OuterVolumeSpecName: "config-data") pod "a7e96f37-574f-4900-88f9-33dc41179807" (UID: "a7e96f37-574f-4900-88f9-33dc41179807"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.383652 4755 scope.go:117] "RemoveContainer" containerID="f9e6e8f3d6be6e25c707568e1e71cb08d1ef2b9bc56285251a356b84cfbe3a19" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.390064 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7e96f37-574f-4900-88f9-33dc41179807-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.390092 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7e96f37-574f-4900-88f9-33dc41179807-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.390904 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-5vmsg"] Nov 24 01:29:56 crc kubenswrapper[4755]: W1124 01:29:56.392973 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfd35021d_eaba_4748_b7b5_371fac220837.slice/crio-ed3a2c236b5ecc6dc7aa600e1866c259d833a508a0c1f57b2e7c202f4bc0710a WatchSource:0}: Error finding container ed3a2c236b5ecc6dc7aa600e1866c259d833a508a0c1f57b2e7c202f4bc0710a: Status 404 returned error can't find the container with id ed3a2c236b5ecc6dc7aa600e1866c259d833a508a0c1f57b2e7c202f4bc0710a Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.631771 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.660693 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.669795 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 24 01:29:56 crc kubenswrapper[4755]: E1124 01:29:56.670317 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7e96f37-574f-4900-88f9-33dc41179807" containerName="proxy-httpd" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.670337 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7e96f37-574f-4900-88f9-33dc41179807" containerName="proxy-httpd" Nov 24 01:29:56 crc kubenswrapper[4755]: E1124 01:29:56.670354 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7e96f37-574f-4900-88f9-33dc41179807" containerName="sg-core" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.670361 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7e96f37-574f-4900-88f9-33dc41179807" containerName="sg-core" Nov 24 01:29:56 crc kubenswrapper[4755]: E1124 01:29:56.670390 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7e96f37-574f-4900-88f9-33dc41179807" containerName="ceilometer-notification-agent" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.670397 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7e96f37-574f-4900-88f9-33dc41179807" containerName="ceilometer-notification-agent" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.670575 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7e96f37-574f-4900-88f9-33dc41179807" containerName="proxy-httpd" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.670615 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7e96f37-574f-4900-88f9-33dc41179807" containerName="ceilometer-notification-agent" Nov 24 01:29:56 crc 
kubenswrapper[4755]: I1124 01:29:56.670641 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7e96f37-574f-4900-88f9-33dc41179807" containerName="sg-core" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.672681 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.675419 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.675649 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.685997 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.693565 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7a8a46b7-de82-4985-9b04-6287ed9d8f30-run-httpd\") pod \"ceilometer-0\" (UID: \"7a8a46b7-de82-4985-9b04-6287ed9d8f30\") " pod="openstack/ceilometer-0" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.693642 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nkl87\" (UniqueName: \"kubernetes.io/projected/7a8a46b7-de82-4985-9b04-6287ed9d8f30-kube-api-access-nkl87\") pod \"ceilometer-0\" (UID: \"7a8a46b7-de82-4985-9b04-6287ed9d8f30\") " pod="openstack/ceilometer-0" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.693666 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7a8a46b7-de82-4985-9b04-6287ed9d8f30-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7a8a46b7-de82-4985-9b04-6287ed9d8f30\") " pod="openstack/ceilometer-0" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.693702 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a8a46b7-de82-4985-9b04-6287ed9d8f30-config-data\") pod \"ceilometer-0\" (UID: \"7a8a46b7-de82-4985-9b04-6287ed9d8f30\") " pod="openstack/ceilometer-0" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.693822 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7a8a46b7-de82-4985-9b04-6287ed9d8f30-log-httpd\") pod \"ceilometer-0\" (UID: \"7a8a46b7-de82-4985-9b04-6287ed9d8f30\") " pod="openstack/ceilometer-0" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.693840 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7a8a46b7-de82-4985-9b04-6287ed9d8f30-scripts\") pod \"ceilometer-0\" (UID: \"7a8a46b7-de82-4985-9b04-6287ed9d8f30\") " pod="openstack/ceilometer-0" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.693876 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a8a46b7-de82-4985-9b04-6287ed9d8f30-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7a8a46b7-de82-4985-9b04-6287ed9d8f30\") " pod="openstack/ceilometer-0" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.783273 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/nova-api-497f-account-create-vxldj"] Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.789839 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.794935 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a8a46b7-de82-4985-9b04-6287ed9d8f30-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7a8a46b7-de82-4985-9b04-6287ed9d8f30\") " pod="openstack/ceilometer-0" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.794994 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7a8a46b7-de82-4985-9b04-6287ed9d8f30-run-httpd\") pod \"ceilometer-0\" (UID: \"7a8a46b7-de82-4985-9b04-6287ed9d8f30\") " pod="openstack/ceilometer-0" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.795023 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nkl87\" (UniqueName: \"kubernetes.io/projected/7a8a46b7-de82-4985-9b04-6287ed9d8f30-kube-api-access-nkl87\") pod \"ceilometer-0\" (UID: \"7a8a46b7-de82-4985-9b04-6287ed9d8f30\") " pod="openstack/ceilometer-0" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.795043 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7a8a46b7-de82-4985-9b04-6287ed9d8f30-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7a8a46b7-de82-4985-9b04-6287ed9d8f30\") " pod="openstack/ceilometer-0" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.795064 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a8a46b7-de82-4985-9b04-6287ed9d8f30-config-data\") pod \"ceilometer-0\" (UID: \"7a8a46b7-de82-4985-9b04-6287ed9d8f30\") " pod="openstack/ceilometer-0" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.795125 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7a8a46b7-de82-4985-9b04-6287ed9d8f30-log-httpd\") pod \"ceilometer-0\" (UID: \"7a8a46b7-de82-4985-9b04-6287ed9d8f30\") " pod="openstack/ceilometer-0" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.795141 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7a8a46b7-de82-4985-9b04-6287ed9d8f30-scripts\") pod \"ceilometer-0\" (UID: \"7a8a46b7-de82-4985-9b04-6287ed9d8f30\") " pod="openstack/ceilometer-0" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.796310 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7a8a46b7-de82-4985-9b04-6287ed9d8f30-run-httpd\") pod \"ceilometer-0\" (UID: \"7a8a46b7-de82-4985-9b04-6287ed9d8f30\") " pod="openstack/ceilometer-0" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.800172 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7a8a46b7-de82-4985-9b04-6287ed9d8f30-log-httpd\") pod \"ceilometer-0\" (UID: \"7a8a46b7-de82-4985-9b04-6287ed9d8f30\") " pod="openstack/ceilometer-0" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.801493 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/7a8a46b7-de82-4985-9b04-6287ed9d8f30-scripts\") pod \"ceilometer-0\" (UID: \"7a8a46b7-de82-4985-9b04-6287ed9d8f30\") " pod="openstack/ceilometer-0" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.801492 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a8a46b7-de82-4985-9b04-6287ed9d8f30-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7a8a46b7-de82-4985-9b04-6287ed9d8f30\") " pod="openstack/ceilometer-0" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.803654 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a8a46b7-de82-4985-9b04-6287ed9d8f30-config-data\") pod \"ceilometer-0\" (UID: \"7a8a46b7-de82-4985-9b04-6287ed9d8f30\") " pod="openstack/ceilometer-0" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.805089 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-frffp"] Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.807495 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7a8a46b7-de82-4985-9b04-6287ed9d8f30-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7a8a46b7-de82-4985-9b04-6287ed9d8f30\") " pod="openstack/ceilometer-0" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.823465 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nkl87\" (UniqueName: \"kubernetes.io/projected/7a8a46b7-de82-4985-9b04-6287ed9d8f30-kube-api-access-nkl87\") pod \"ceilometer-0\" (UID: \"7a8a46b7-de82-4985-9b04-6287ed9d8f30\") " pod="openstack/ceilometer-0" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.833496 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-b37d-account-create-72hxc"] Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.847212 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-2brc6"] Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.853641 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-d94f-account-create-jhmwf"] Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.893419 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.893663 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="a10198c5-a145-4df1-a99d-14463ff5d048" containerName="glance-log" containerID="cri-o://4d843b7361a4e224478832ce3c5543fa05a489c9a8224647d78d6682f80906d0" gracePeriod=30 Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.893820 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="a10198c5-a145-4df1-a99d-14463ff5d048" containerName="glance-httpd" containerID="cri-o://87d2ffcd285ff3e2495102203a6f65d2a346cefbab759e72ba2700e355a221a8" gracePeriod=30 Nov 24 01:29:56 crc kubenswrapper[4755]: W1124 01:29:56.914561 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbc52419e_19f8_4cd9_a1ed_7286e096d6cf.slice/crio-b42286d6efe87da6a83a86ce48f2323725c0922f5fb1e66daf949cd4d692b995 WatchSource:0}: Error finding container b42286d6efe87da6a83a86ce48f2323725c0922f5fb1e66daf949cd4d692b995: 
Status 404 returned error can't find the container with id b42286d6efe87da6a83a86ce48f2323725c0922f5fb1e66daf949cd4d692b995 Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.935038 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.935253 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Nov 24 01:29:56 crc kubenswrapper[4755]: I1124 01:29:56.987548 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 24 01:29:57 crc kubenswrapper[4755]: I1124 01:29:57.257799 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-b37d-account-create-72hxc" event={"ID":"e60b8f75-376b-4ba1-9b41-5f334cec157f","Type":"ContainerStarted","Data":"4b1364f8a1808898a48fdf1e9991cc38a102ed3ad5739653c2d843fa8354ef9d"} Nov 24 01:29:57 crc kubenswrapper[4755]: I1124 01:29:57.263063 4755 generic.go:334] "Generic (PLEG): container finished" podID="fd35021d-eaba-4748-b7b5-371fac220837" containerID="f24356acf661f84dbe884261cd12ffa9933a5b203528afcfd35dca619f59fd09" exitCode=0 Nov 24 01:29:57 crc kubenswrapper[4755]: I1124 01:29:57.263109 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-5vmsg" event={"ID":"fd35021d-eaba-4748-b7b5-371fac220837","Type":"ContainerDied","Data":"f24356acf661f84dbe884261cd12ffa9933a5b203528afcfd35dca619f59fd09"} Nov 24 01:29:57 crc kubenswrapper[4755]: I1124 01:29:57.263128 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-5vmsg" event={"ID":"fd35021d-eaba-4748-b7b5-371fac220837","Type":"ContainerStarted","Data":"ed3a2c236b5ecc6dc7aa600e1866c259d833a508a0c1f57b2e7c202f4bc0710a"} Nov 24 01:29:57 crc kubenswrapper[4755]: I1124 01:29:57.265567 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-497f-account-create-vxldj" event={"ID":"fdf82262-5b8f-4ee2-88b6-494e6f4d5b58","Type":"ContainerStarted","Data":"6198f222993a2a81ea440ab6b2570b5a766fe65019ff07dbcddef6e599cc292d"} Nov 24 01:29:57 crc kubenswrapper[4755]: I1124 01:29:57.267190 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-2brc6" event={"ID":"ddadc31c-aad5-4896-84da-4fbe82710d53","Type":"ContainerStarted","Data":"c60f6ee914e6f59b6e36b1f6bca4e1af904a944f468f456ad8fa03f0c168fb1f"} Nov 24 01:29:57 crc kubenswrapper[4755]: I1124 01:29:57.285518 4755 generic.go:334] "Generic (PLEG): container finished" podID="a10198c5-a145-4df1-a99d-14463ff5d048" containerID="4d843b7361a4e224478832ce3c5543fa05a489c9a8224647d78d6682f80906d0" exitCode=143 Nov 24 01:29:57 crc kubenswrapper[4755]: I1124 01:29:57.285629 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"a10198c5-a145-4df1-a99d-14463ff5d048","Type":"ContainerDied","Data":"4d843b7361a4e224478832ce3c5543fa05a489c9a8224647d78d6682f80906d0"} Nov 24 01:29:57 crc kubenswrapper[4755]: I1124 01:29:57.290784 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-d94f-account-create-jhmwf" event={"ID":"bc52419e-19f8-4cd9-a1ed-7286e096d6cf","Type":"ContainerStarted","Data":"b42286d6efe87da6a83a86ce48f2323725c0922f5fb1e66daf949cd4d692b995"} Nov 24 01:29:57 crc kubenswrapper[4755]: I1124 01:29:57.294318 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-frffp" 
event={"ID":"87ee57db-6666-4ced-b558-accfa958ce55","Type":"ContainerStarted","Data":"1888d4fcec1ee0bc54072276241a4bce15fc326b945a2ac5b6e033cfb11658de"} Nov 24 01:29:57 crc kubenswrapper[4755]: I1124 01:29:57.296178 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"1c6ba259-f60e-4b3f-b901-e42aaff73569","Type":"ContainerStarted","Data":"742e05f5df8b2d0417eea6b160a19b5e303b52d1c0c4322a6c053432d883b0fd"} Nov 24 01:29:57 crc kubenswrapper[4755]: I1124 01:29:57.313304 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.966988523 podStartE2EDuration="13.313287421s" podCreationTimestamp="2025-11-24 01:29:44 +0000 UTC" firstStartedPulling="2025-11-24 01:29:45.545567465 +0000 UTC m=+1010.231632966" lastFinishedPulling="2025-11-24 01:29:55.891866363 +0000 UTC m=+1020.577931864" observedRunningTime="2025-11-24 01:29:57.31035815 +0000 UTC m=+1021.996423651" watchObservedRunningTime="2025-11-24 01:29:57.313287421 +0000 UTC m=+1021.999352922" Nov 24 01:29:57 crc kubenswrapper[4755]: I1124 01:29:57.463590 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 24 01:29:57 crc kubenswrapper[4755]: I1124 01:29:57.869836 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 24 01:29:58 crc kubenswrapper[4755]: I1124 01:29:58.010175 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a7e96f37-574f-4900-88f9-33dc41179807" path="/var/lib/kubelet/pods/a7e96f37-574f-4900-88f9-33dc41179807/volumes" Nov 24 01:29:58 crc kubenswrapper[4755]: I1124 01:29:58.258947 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-55cf755d8-2cns2" podUID="dc7447cb-d4c3-48d1-8cd3-065d5eee21cb" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.148:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.148:8443: connect: connection refused" Nov 24 01:29:58 crc kubenswrapper[4755]: I1124 01:29:58.307291 4755 generic.go:334] "Generic (PLEG): container finished" podID="bc52419e-19f8-4cd9-a1ed-7286e096d6cf" containerID="572ccff292dac1ea788cc9687596e62ec88e6173f6abdd705d35ae407f3de48e" exitCode=0 Nov 24 01:29:58 crc kubenswrapper[4755]: I1124 01:29:58.307362 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-d94f-account-create-jhmwf" event={"ID":"bc52419e-19f8-4cd9-a1ed-7286e096d6cf","Type":"ContainerDied","Data":"572ccff292dac1ea788cc9687596e62ec88e6173f6abdd705d35ae407f3de48e"} Nov 24 01:29:58 crc kubenswrapper[4755]: I1124 01:29:58.308990 4755 generic.go:334] "Generic (PLEG): container finished" podID="fdf82262-5b8f-4ee2-88b6-494e6f4d5b58" containerID="5165a6dd847c67e345aebca9c6fb54cf8252bf310676fad9aa6110a0b068459c" exitCode=0 Nov 24 01:29:58 crc kubenswrapper[4755]: I1124 01:29:58.309030 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-497f-account-create-vxldj" event={"ID":"fdf82262-5b8f-4ee2-88b6-494e6f4d5b58","Type":"ContainerDied","Data":"5165a6dd847c67e345aebca9c6fb54cf8252bf310676fad9aa6110a0b068459c"} Nov 24 01:29:58 crc kubenswrapper[4755]: I1124 01:29:58.310686 4755 generic.go:334] "Generic (PLEG): container finished" podID="ddadc31c-aad5-4896-84da-4fbe82710d53" containerID="610fc7fd70375ef61dbd4e7e904aa258c8763b4d64281d0392a034af90136bc4" exitCode=0 Nov 24 01:29:58 crc kubenswrapper[4755]: I1124 01:29:58.310760 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-cell0-db-create-2brc6" event={"ID":"ddadc31c-aad5-4896-84da-4fbe82710d53","Type":"ContainerDied","Data":"610fc7fd70375ef61dbd4e7e904aa258c8763b4d64281d0392a034af90136bc4"} Nov 24 01:29:58 crc kubenswrapper[4755]: I1124 01:29:58.312159 4755 generic.go:334] "Generic (PLEG): container finished" podID="87ee57db-6666-4ced-b558-accfa958ce55" containerID="e32ed86fe959509af9beee44386b6725f66c514d040e0677c250079e6ca2540b" exitCode=0 Nov 24 01:29:58 crc kubenswrapper[4755]: I1124 01:29:58.312234 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-frffp" event={"ID":"87ee57db-6666-4ced-b558-accfa958ce55","Type":"ContainerDied","Data":"e32ed86fe959509af9beee44386b6725f66c514d040e0677c250079e6ca2540b"} Nov 24 01:29:58 crc kubenswrapper[4755]: I1124 01:29:58.313617 4755 generic.go:334] "Generic (PLEG): container finished" podID="e60b8f75-376b-4ba1-9b41-5f334cec157f" containerID="8ec192c49adc214e823e98d164706848cc2640b6548607b86b281f0e3088b1f2" exitCode=0 Nov 24 01:29:58 crc kubenswrapper[4755]: I1124 01:29:58.313666 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-b37d-account-create-72hxc" event={"ID":"e60b8f75-376b-4ba1-9b41-5f334cec157f","Type":"ContainerDied","Data":"8ec192c49adc214e823e98d164706848cc2640b6548607b86b281f0e3088b1f2"} Nov 24 01:29:58 crc kubenswrapper[4755]: I1124 01:29:58.315036 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7a8a46b7-de82-4985-9b04-6287ed9d8f30","Type":"ContainerStarted","Data":"33e7aeeaf709025e9fd12cf2d8c316890d7ae2ded5280523cc14f2ede1ac5246"} Nov 24 01:29:58 crc kubenswrapper[4755]: I1124 01:29:58.672942 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-5vmsg" Nov 24 01:29:58 crc kubenswrapper[4755]: I1124 01:29:58.733664 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fd35021d-eaba-4748-b7b5-371fac220837-operator-scripts\") pod \"fd35021d-eaba-4748-b7b5-371fac220837\" (UID: \"fd35021d-eaba-4748-b7b5-371fac220837\") " Nov 24 01:29:58 crc kubenswrapper[4755]: I1124 01:29:58.733783 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tmwxh\" (UniqueName: \"kubernetes.io/projected/fd35021d-eaba-4748-b7b5-371fac220837-kube-api-access-tmwxh\") pod \"fd35021d-eaba-4748-b7b5-371fac220837\" (UID: \"fd35021d-eaba-4748-b7b5-371fac220837\") " Nov 24 01:29:58 crc kubenswrapper[4755]: I1124 01:29:58.734286 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fd35021d-eaba-4748-b7b5-371fac220837-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "fd35021d-eaba-4748-b7b5-371fac220837" (UID: "fd35021d-eaba-4748-b7b5-371fac220837"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:29:58 crc kubenswrapper[4755]: I1124 01:29:58.738821 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd35021d-eaba-4748-b7b5-371fac220837-kube-api-access-tmwxh" (OuterVolumeSpecName: "kube-api-access-tmwxh") pod "fd35021d-eaba-4748-b7b5-371fac220837" (UID: "fd35021d-eaba-4748-b7b5-371fac220837"). InnerVolumeSpecName "kube-api-access-tmwxh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:29:58 crc kubenswrapper[4755]: I1124 01:29:58.835192 4755 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fd35021d-eaba-4748-b7b5-371fac220837-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:58 crc kubenswrapper[4755]: I1124 01:29:58.835255 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tmwxh\" (UniqueName: \"kubernetes.io/projected/fd35021d-eaba-4748-b7b5-371fac220837-kube-api-access-tmwxh\") on node \"crc\" DevicePath \"\"" Nov 24 01:29:59 crc kubenswrapper[4755]: I1124 01:29:59.330078 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7a8a46b7-de82-4985-9b04-6287ed9d8f30","Type":"ContainerStarted","Data":"521bc80ec427df057b5e0e414d7cec75a2344f7f05cf577494f18b0979ca2f3e"} Nov 24 01:29:59 crc kubenswrapper[4755]: I1124 01:29:59.333661 4755 generic.go:334] "Generic (PLEG): container finished" podID="0d5b1659-28dc-49a7-9b79-e04b9e30f0f6" containerID="3437f900209c8262b3253caf386661d184e82f24bd4db4c41fb76179ac5b147a" exitCode=0 Nov 24 01:29:59 crc kubenswrapper[4755]: I1124 01:29:59.334026 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0d5b1659-28dc-49a7-9b79-e04b9e30f0f6","Type":"ContainerDied","Data":"3437f900209c8262b3253caf386661d184e82f24bd4db4c41fb76179ac5b147a"} Nov 24 01:29:59 crc kubenswrapper[4755]: I1124 01:29:59.336750 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-5vmsg" Nov 24 01:29:59 crc kubenswrapper[4755]: I1124 01:29:59.336902 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-5vmsg" event={"ID":"fd35021d-eaba-4748-b7b5-371fac220837","Type":"ContainerDied","Data":"ed3a2c236b5ecc6dc7aa600e1866c259d833a508a0c1f57b2e7c202f4bc0710a"} Nov 24 01:29:59 crc kubenswrapper[4755]: I1124 01:29:59.336934 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ed3a2c236b5ecc6dc7aa600e1866c259d833a508a0c1f57b2e7c202f4bc0710a" Nov 24 01:29:59 crc kubenswrapper[4755]: I1124 01:29:59.811074 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-497f-account-create-vxldj" Nov 24 01:29:59 crc kubenswrapper[4755]: I1124 01:29:59.814872 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-d94f-account-create-jhmwf" Nov 24 01:29:59 crc kubenswrapper[4755]: I1124 01:29:59.965106 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fdf82262-5b8f-4ee2-88b6-494e6f4d5b58-operator-scripts\") pod \"fdf82262-5b8f-4ee2-88b6-494e6f4d5b58\" (UID: \"fdf82262-5b8f-4ee2-88b6-494e6f4d5b58\") " Nov 24 01:29:59 crc kubenswrapper[4755]: I1124 01:29:59.965176 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h6n7q\" (UniqueName: \"kubernetes.io/projected/bc52419e-19f8-4cd9-a1ed-7286e096d6cf-kube-api-access-h6n7q\") pod \"bc52419e-19f8-4cd9-a1ed-7286e096d6cf\" (UID: \"bc52419e-19f8-4cd9-a1ed-7286e096d6cf\") " Nov 24 01:29:59 crc kubenswrapper[4755]: I1124 01:29:59.965248 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bc52419e-19f8-4cd9-a1ed-7286e096d6cf-operator-scripts\") pod \"bc52419e-19f8-4cd9-a1ed-7286e096d6cf\" (UID: \"bc52419e-19f8-4cd9-a1ed-7286e096d6cf\") " Nov 24 01:29:59 crc kubenswrapper[4755]: I1124 01:29:59.965302 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vr5rh\" (UniqueName: \"kubernetes.io/projected/fdf82262-5b8f-4ee2-88b6-494e6f4d5b58-kube-api-access-vr5rh\") pod \"fdf82262-5b8f-4ee2-88b6-494e6f4d5b58\" (UID: \"fdf82262-5b8f-4ee2-88b6-494e6f4d5b58\") " Nov 24 01:29:59 crc kubenswrapper[4755]: I1124 01:29:59.966469 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fdf82262-5b8f-4ee2-88b6-494e6f4d5b58-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "fdf82262-5b8f-4ee2-88b6-494e6f4d5b58" (UID: "fdf82262-5b8f-4ee2-88b6-494e6f4d5b58"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:29:59 crc kubenswrapper[4755]: I1124 01:29:59.966890 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc52419e-19f8-4cd9-a1ed-7286e096d6cf-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "bc52419e-19f8-4cd9-a1ed-7286e096d6cf" (UID: "bc52419e-19f8-4cd9-a1ed-7286e096d6cf"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:29:59 crc kubenswrapper[4755]: I1124 01:29:59.971844 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc52419e-19f8-4cd9-a1ed-7286e096d6cf-kube-api-access-h6n7q" (OuterVolumeSpecName: "kube-api-access-h6n7q") pod "bc52419e-19f8-4cd9-a1ed-7286e096d6cf" (UID: "bc52419e-19f8-4cd9-a1ed-7286e096d6cf"). InnerVolumeSpecName "kube-api-access-h6n7q". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:29:59 crc kubenswrapper[4755]: I1124 01:29:59.973361 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fdf82262-5b8f-4ee2-88b6-494e6f4d5b58-kube-api-access-vr5rh" (OuterVolumeSpecName: "kube-api-access-vr5rh") pod "fdf82262-5b8f-4ee2-88b6-494e6f4d5b58" (UID: "fdf82262-5b8f-4ee2-88b6-494e6f4d5b58"). InnerVolumeSpecName "kube-api-access-vr5rh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.051261 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="a10198c5-a145-4df1-a99d-14463ff5d048" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.151:9292/healthcheck\": read tcp 10.217.0.2:47030->10.217.0.151:9292: read: connection reset by peer" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.053170 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="a10198c5-a145-4df1-a99d-14463ff5d048" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.151:9292/healthcheck\": read tcp 10.217.0.2:47032->10.217.0.151:9292: read: connection reset by peer" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.067410 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h6n7q\" (UniqueName: \"kubernetes.io/projected/bc52419e-19f8-4cd9-a1ed-7286e096d6cf-kube-api-access-h6n7q\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.067473 4755 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bc52419e-19f8-4cd9-a1ed-7286e096d6cf-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.067484 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vr5rh\" (UniqueName: \"kubernetes.io/projected/fdf82262-5b8f-4ee2-88b6-494e6f4d5b58-kube-api-access-vr5rh\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.067495 4755 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fdf82262-5b8f-4ee2-88b6-494e6f4d5b58-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.126894 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29399130-98xxs"] Nov 24 01:30:00 crc kubenswrapper[4755]: E1124 01:30:00.127325 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc52419e-19f8-4cd9-a1ed-7286e096d6cf" containerName="mariadb-account-create" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.127356 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc52419e-19f8-4cd9-a1ed-7286e096d6cf" containerName="mariadb-account-create" Nov 24 01:30:00 crc kubenswrapper[4755]: E1124 01:30:00.127378 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd35021d-eaba-4748-b7b5-371fac220837" containerName="mariadb-database-create" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.127384 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd35021d-eaba-4748-b7b5-371fac220837" containerName="mariadb-database-create" Nov 24 01:30:00 crc kubenswrapper[4755]: E1124 01:30:00.127411 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdf82262-5b8f-4ee2-88b6-494e6f4d5b58" containerName="mariadb-account-create" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.127417 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdf82262-5b8f-4ee2-88b6-494e6f4d5b58" containerName="mariadb-account-create" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.127670 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd35021d-eaba-4748-b7b5-371fac220837" 
containerName="mariadb-database-create" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.127688 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="fdf82262-5b8f-4ee2-88b6-494e6f4d5b58" containerName="mariadb-account-create" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.127705 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc52419e-19f8-4cd9-a1ed-7286e096d6cf" containerName="mariadb-account-create" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.128320 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29399130-98xxs" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.129980 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.130132 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.151371 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29399130-98xxs"] Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.171000 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v6tt4\" (UniqueName: \"kubernetes.io/projected/f520ef1b-1a13-43c3-95cc-66a957b8e41f-kube-api-access-v6tt4\") pod \"collect-profiles-29399130-98xxs\" (UID: \"f520ef1b-1a13-43c3-95cc-66a957b8e41f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399130-98xxs" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.171051 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f520ef1b-1a13-43c3-95cc-66a957b8e41f-config-volume\") pod \"collect-profiles-29399130-98xxs\" (UID: \"f520ef1b-1a13-43c3-95cc-66a957b8e41f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399130-98xxs" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.171085 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f520ef1b-1a13-43c3-95cc-66a957b8e41f-secret-volume\") pod \"collect-profiles-29399130-98xxs\" (UID: \"f520ef1b-1a13-43c3-95cc-66a957b8e41f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399130-98xxs" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.190839 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-b37d-account-create-72hxc" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.197373 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-2brc6" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.259821 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-frffp" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.272390 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v6tt4\" (UniqueName: \"kubernetes.io/projected/f520ef1b-1a13-43c3-95cc-66a957b8e41f-kube-api-access-v6tt4\") pod \"collect-profiles-29399130-98xxs\" (UID: \"f520ef1b-1a13-43c3-95cc-66a957b8e41f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399130-98xxs" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.272444 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f520ef1b-1a13-43c3-95cc-66a957b8e41f-config-volume\") pod \"collect-profiles-29399130-98xxs\" (UID: \"f520ef1b-1a13-43c3-95cc-66a957b8e41f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399130-98xxs" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.272475 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f520ef1b-1a13-43c3-95cc-66a957b8e41f-secret-volume\") pod \"collect-profiles-29399130-98xxs\" (UID: \"f520ef1b-1a13-43c3-95cc-66a957b8e41f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399130-98xxs" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.273538 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f520ef1b-1a13-43c3-95cc-66a957b8e41f-config-volume\") pod \"collect-profiles-29399130-98xxs\" (UID: \"f520ef1b-1a13-43c3-95cc-66a957b8e41f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399130-98xxs" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.285178 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.291919 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v6tt4\" (UniqueName: \"kubernetes.io/projected/f520ef1b-1a13-43c3-95cc-66a957b8e41f-kube-api-access-v6tt4\") pod \"collect-profiles-29399130-98xxs\" (UID: \"f520ef1b-1a13-43c3-95cc-66a957b8e41f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399130-98xxs" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.294661 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f520ef1b-1a13-43c3-95cc-66a957b8e41f-secret-volume\") pod \"collect-profiles-29399130-98xxs\" (UID: \"f520ef1b-1a13-43c3-95cc-66a957b8e41f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399130-98xxs" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.315594 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29399130-98xxs" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.358948 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7a8a46b7-de82-4985-9b04-6287ed9d8f30","Type":"ContainerStarted","Data":"5a01d0bc538f5c8fed0e8cefc2b624a998bfba13b69b195b454568ed2ee77308"} Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.361929 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-d94f-account-create-jhmwf" event={"ID":"bc52419e-19f8-4cd9-a1ed-7286e096d6cf","Type":"ContainerDied","Data":"b42286d6efe87da6a83a86ce48f2323725c0922f5fb1e66daf949cd4d692b995"} Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.361962 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b42286d6efe87da6a83a86ce48f2323725c0922f5fb1e66daf949cd4d692b995" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.362014 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-d94f-account-create-jhmwf" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.371922 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.372093 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0d5b1659-28dc-49a7-9b79-e04b9e30f0f6","Type":"ContainerDied","Data":"3114941a5a679c5158cabefa84ed73aae938e86d32678e80ce08d1b84c53a0e9"} Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.372127 4755 scope.go:117] "RemoveContainer" containerID="3437f900209c8262b3253caf386661d184e82f24bd4db4c41fb76179ac5b147a" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.373140 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hvjhj\" (UniqueName: \"kubernetes.io/projected/e60b8f75-376b-4ba1-9b41-5f334cec157f-kube-api-access-hvjhj\") pod \"e60b8f75-376b-4ba1-9b41-5f334cec157f\" (UID: \"e60b8f75-376b-4ba1-9b41-5f334cec157f\") " Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.373206 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/87ee57db-6666-4ced-b558-accfa958ce55-operator-scripts\") pod \"87ee57db-6666-4ced-b558-accfa958ce55\" (UID: \"87ee57db-6666-4ced-b558-accfa958ce55\") " Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.373245 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ddadc31c-aad5-4896-84da-4fbe82710d53-operator-scripts\") pod \"ddadc31c-aad5-4896-84da-4fbe82710d53\" (UID: \"ddadc31c-aad5-4896-84da-4fbe82710d53\") " Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.373383 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e60b8f75-376b-4ba1-9b41-5f334cec157f-operator-scripts\") pod \"e60b8f75-376b-4ba1-9b41-5f334cec157f\" (UID: \"e60b8f75-376b-4ba1-9b41-5f334cec157f\") " Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.373499 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgn2w\" (UniqueName: \"kubernetes.io/projected/87ee57db-6666-4ced-b558-accfa958ce55-kube-api-access-zgn2w\") pod 
\"87ee57db-6666-4ced-b558-accfa958ce55\" (UID: \"87ee57db-6666-4ced-b558-accfa958ce55\") " Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.374210 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ddadc31c-aad5-4896-84da-4fbe82710d53-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ddadc31c-aad5-4896-84da-4fbe82710d53" (UID: "ddadc31c-aad5-4896-84da-4fbe82710d53"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.376063 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tztv2\" (UniqueName: \"kubernetes.io/projected/ddadc31c-aad5-4896-84da-4fbe82710d53-kube-api-access-tztv2\") pod \"ddadc31c-aad5-4896-84da-4fbe82710d53\" (UID: \"ddadc31c-aad5-4896-84da-4fbe82710d53\") " Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.376534 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87ee57db-6666-4ced-b558-accfa958ce55-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "87ee57db-6666-4ced-b558-accfa958ce55" (UID: "87ee57db-6666-4ced-b558-accfa958ce55"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.377077 4755 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/87ee57db-6666-4ced-b558-accfa958ce55-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.377106 4755 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ddadc31c-aad5-4896-84da-4fbe82710d53-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.378892 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e60b8f75-376b-4ba1-9b41-5f334cec157f-kube-api-access-hvjhj" (OuterVolumeSpecName: "kube-api-access-hvjhj") pod "e60b8f75-376b-4ba1-9b41-5f334cec157f" (UID: "e60b8f75-376b-4ba1-9b41-5f334cec157f"). InnerVolumeSpecName "kube-api-access-hvjhj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.378929 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-497f-account-create-vxldj" event={"ID":"fdf82262-5b8f-4ee2-88b6-494e6f4d5b58","Type":"ContainerDied","Data":"6198f222993a2a81ea440ab6b2570b5a766fe65019ff07dbcddef6e599cc292d"} Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.379050 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6198f222993a2a81ea440ab6b2570b5a766fe65019ff07dbcddef6e599cc292d" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.378910 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-497f-account-create-vxldj" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.380474 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87ee57db-6666-4ced-b558-accfa958ce55-kube-api-access-zgn2w" (OuterVolumeSpecName: "kube-api-access-zgn2w") pod "87ee57db-6666-4ced-b558-accfa958ce55" (UID: "87ee57db-6666-4ced-b558-accfa958ce55"). InnerVolumeSpecName "kube-api-access-zgn2w". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.380775 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e60b8f75-376b-4ba1-9b41-5f334cec157f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e60b8f75-376b-4ba1-9b41-5f334cec157f" (UID: "e60b8f75-376b-4ba1-9b41-5f334cec157f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.381690 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ddadc31c-aad5-4896-84da-4fbe82710d53-kube-api-access-tztv2" (OuterVolumeSpecName: "kube-api-access-tztv2") pod "ddadc31c-aad5-4896-84da-4fbe82710d53" (UID: "ddadc31c-aad5-4896-84da-4fbe82710d53"). InnerVolumeSpecName "kube-api-access-tztv2". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.383789 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-2brc6" event={"ID":"ddadc31c-aad5-4896-84da-4fbe82710d53","Type":"ContainerDied","Data":"c60f6ee914e6f59b6e36b1f6bca4e1af904a944f468f456ad8fa03f0c168fb1f"} Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.383833 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c60f6ee914e6f59b6e36b1f6bca4e1af904a944f468f456ad8fa03f0c168fb1f" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.383896 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-2brc6" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.400099 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-frffp" event={"ID":"87ee57db-6666-4ced-b558-accfa958ce55","Type":"ContainerDied","Data":"1888d4fcec1ee0bc54072276241a4bce15fc326b945a2ac5b6e033cfb11658de"} Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.400141 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1888d4fcec1ee0bc54072276241a4bce15fc326b945a2ac5b6e033cfb11658de" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.400159 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-frffp" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.401966 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-b37d-account-create-72hxc" event={"ID":"e60b8f75-376b-4ba1-9b41-5f334cec157f","Type":"ContainerDied","Data":"4b1364f8a1808898a48fdf1e9991cc38a102ed3ad5739653c2d843fa8354ef9d"} Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.402001 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4b1364f8a1808898a48fdf1e9991cc38a102ed3ad5739653c2d843fa8354ef9d" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.402054 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-b37d-account-create-72hxc" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.409201 4755 generic.go:334] "Generic (PLEG): container finished" podID="a10198c5-a145-4df1-a99d-14463ff5d048" containerID="87d2ffcd285ff3e2495102203a6f65d2a346cefbab759e72ba2700e355a221a8" exitCode=0 Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.409357 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"a10198c5-a145-4df1-a99d-14463ff5d048","Type":"ContainerDied","Data":"87d2ffcd285ff3e2495102203a6f65d2a346cefbab759e72ba2700e355a221a8"} Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.417434 4755 scope.go:117] "RemoveContainer" containerID="77b5cc9d31c3d1621de26fb42112358d0e9e5730c5093b86ffb25f07b3fc67f6" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.478233 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d5b1659-28dc-49a7-9b79-e04b9e30f0f6-combined-ca-bundle\") pod \"0d5b1659-28dc-49a7-9b79-e04b9e30f0f6\" (UID: \"0d5b1659-28dc-49a7-9b79-e04b9e30f0f6\") " Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.478507 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d5b1659-28dc-49a7-9b79-e04b9e30f0f6-logs\") pod \"0d5b1659-28dc-49a7-9b79-e04b9e30f0f6\" (UID: \"0d5b1659-28dc-49a7-9b79-e04b9e30f0f6\") " Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.478543 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0d5b1659-28dc-49a7-9b79-e04b9e30f0f6-scripts\") pod \"0d5b1659-28dc-49a7-9b79-e04b9e30f0f6\" (UID: \"0d5b1659-28dc-49a7-9b79-e04b9e30f0f6\") " Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.478593 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"0d5b1659-28dc-49a7-9b79-e04b9e30f0f6\" (UID: \"0d5b1659-28dc-49a7-9b79-e04b9e30f0f6\") " Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.478688 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d5b1659-28dc-49a7-9b79-e04b9e30f0f6-config-data\") pod \"0d5b1659-28dc-49a7-9b79-e04b9e30f0f6\" (UID: \"0d5b1659-28dc-49a7-9b79-e04b9e30f0f6\") " Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.478763 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jjzjl\" (UniqueName: \"kubernetes.io/projected/0d5b1659-28dc-49a7-9b79-e04b9e30f0f6-kube-api-access-jjzjl\") pod \"0d5b1659-28dc-49a7-9b79-e04b9e30f0f6\" (UID: \"0d5b1659-28dc-49a7-9b79-e04b9e30f0f6\") " Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.478780 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0d5b1659-28dc-49a7-9b79-e04b9e30f0f6-httpd-run\") pod \"0d5b1659-28dc-49a7-9b79-e04b9e30f0f6\" (UID: \"0d5b1659-28dc-49a7-9b79-e04b9e30f0f6\") " Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.478847 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0d5b1659-28dc-49a7-9b79-e04b9e30f0f6-public-tls-certs\") pod \"0d5b1659-28dc-49a7-9b79-e04b9e30f0f6\" (UID: 
\"0d5b1659-28dc-49a7-9b79-e04b9e30f0f6\") " Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.479973 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0d5b1659-28dc-49a7-9b79-e04b9e30f0f6-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "0d5b1659-28dc-49a7-9b79-e04b9e30f0f6" (UID: "0d5b1659-28dc-49a7-9b79-e04b9e30f0f6"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.485975 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0d5b1659-28dc-49a7-9b79-e04b9e30f0f6-logs" (OuterVolumeSpecName: "logs") pod "0d5b1659-28dc-49a7-9b79-e04b9e30f0f6" (UID: "0d5b1659-28dc-49a7-9b79-e04b9e30f0f6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.487220 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hvjhj\" (UniqueName: \"kubernetes.io/projected/e60b8f75-376b-4ba1-9b41-5f334cec157f-kube-api-access-hvjhj\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.487249 4755 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e60b8f75-376b-4ba1-9b41-5f334cec157f-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.487259 4755 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d5b1659-28dc-49a7-9b79-e04b9e30f0f6-logs\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.487268 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgn2w\" (UniqueName: \"kubernetes.io/projected/87ee57db-6666-4ced-b558-accfa958ce55-kube-api-access-zgn2w\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.487277 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tztv2\" (UniqueName: \"kubernetes.io/projected/ddadc31c-aad5-4896-84da-4fbe82710d53-kube-api-access-tztv2\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.487285 4755 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0d5b1659-28dc-49a7-9b79-e04b9e30f0f6-httpd-run\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.493470 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "glance") pod "0d5b1659-28dc-49a7-9b79-e04b9e30f0f6" (UID: "0d5b1659-28dc-49a7-9b79-e04b9e30f0f6"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.494010 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d5b1659-28dc-49a7-9b79-e04b9e30f0f6-kube-api-access-jjzjl" (OuterVolumeSpecName: "kube-api-access-jjzjl") pod "0d5b1659-28dc-49a7-9b79-e04b9e30f0f6" (UID: "0d5b1659-28dc-49a7-9b79-e04b9e30f0f6"). InnerVolumeSpecName "kube-api-access-jjzjl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.494009 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d5b1659-28dc-49a7-9b79-e04b9e30f0f6-scripts" (OuterVolumeSpecName: "scripts") pod "0d5b1659-28dc-49a7-9b79-e04b9e30f0f6" (UID: "0d5b1659-28dc-49a7-9b79-e04b9e30f0f6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.521261 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d5b1659-28dc-49a7-9b79-e04b9e30f0f6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0d5b1659-28dc-49a7-9b79-e04b9e30f0f6" (UID: "0d5b1659-28dc-49a7-9b79-e04b9e30f0f6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.575367 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d5b1659-28dc-49a7-9b79-e04b9e30f0f6-config-data" (OuterVolumeSpecName: "config-data") pod "0d5b1659-28dc-49a7-9b79-e04b9e30f0f6" (UID: "0d5b1659-28dc-49a7-9b79-e04b9e30f0f6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.589911 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d5b1659-28dc-49a7-9b79-e04b9e30f0f6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.589950 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0d5b1659-28dc-49a7-9b79-e04b9e30f0f6-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.589975 4755 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.589984 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d5b1659-28dc-49a7-9b79-e04b9e30f0f6-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.589994 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jjzjl\" (UniqueName: \"kubernetes.io/projected/0d5b1659-28dc-49a7-9b79-e04b9e30f0f6-kube-api-access-jjzjl\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.612949 4755 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.617294 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.637741 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d5b1659-28dc-49a7-9b79-e04b9e30f0f6-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "0d5b1659-28dc-49a7-9b79-e04b9e30f0f6" (UID: "0d5b1659-28dc-49a7-9b79-e04b9e30f0f6"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.691048 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a10198c5-a145-4df1-a99d-14463ff5d048-scripts\") pod \"a10198c5-a145-4df1-a99d-14463ff5d048\" (UID: \"a10198c5-a145-4df1-a99d-14463ff5d048\") " Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.691123 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a10198c5-a145-4df1-a99d-14463ff5d048-internal-tls-certs\") pod \"a10198c5-a145-4df1-a99d-14463ff5d048\" (UID: \"a10198c5-a145-4df1-a99d-14463ff5d048\") " Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.691183 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a10198c5-a145-4df1-a99d-14463ff5d048-logs\") pod \"a10198c5-a145-4df1-a99d-14463ff5d048\" (UID: \"a10198c5-a145-4df1-a99d-14463ff5d048\") " Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.691201 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k9q5t\" (UniqueName: \"kubernetes.io/projected/a10198c5-a145-4df1-a99d-14463ff5d048-kube-api-access-k9q5t\") pod \"a10198c5-a145-4df1-a99d-14463ff5d048\" (UID: \"a10198c5-a145-4df1-a99d-14463ff5d048\") " Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.691238 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a10198c5-a145-4df1-a99d-14463ff5d048-config-data\") pod \"a10198c5-a145-4df1-a99d-14463ff5d048\" (UID: \"a10198c5-a145-4df1-a99d-14463ff5d048\") " Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.691303 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"a10198c5-a145-4df1-a99d-14463ff5d048\" (UID: \"a10198c5-a145-4df1-a99d-14463ff5d048\") " Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.691390 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a10198c5-a145-4df1-a99d-14463ff5d048-httpd-run\") pod \"a10198c5-a145-4df1-a99d-14463ff5d048\" (UID: \"a10198c5-a145-4df1-a99d-14463ff5d048\") " Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.691420 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a10198c5-a145-4df1-a99d-14463ff5d048-combined-ca-bundle\") pod \"a10198c5-a145-4df1-a99d-14463ff5d048\" (UID: \"a10198c5-a145-4df1-a99d-14463ff5d048\") " Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.691847 4755 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.691865 4755 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0d5b1659-28dc-49a7-9b79-e04b9e30f0f6-public-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.694507 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a10198c5-a145-4df1-a99d-14463ff5d048-logs" 
(OuterVolumeSpecName: "logs") pod "a10198c5-a145-4df1-a99d-14463ff5d048" (UID: "a10198c5-a145-4df1-a99d-14463ff5d048"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.695856 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a10198c5-a145-4df1-a99d-14463ff5d048-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "a10198c5-a145-4df1-a99d-14463ff5d048" (UID: "a10198c5-a145-4df1-a99d-14463ff5d048"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.698705 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a10198c5-a145-4df1-a99d-14463ff5d048-kube-api-access-k9q5t" (OuterVolumeSpecName: "kube-api-access-k9q5t") pod "a10198c5-a145-4df1-a99d-14463ff5d048" (UID: "a10198c5-a145-4df1-a99d-14463ff5d048"). InnerVolumeSpecName "kube-api-access-k9q5t". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.700533 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "a10198c5-a145-4df1-a99d-14463ff5d048" (UID: "a10198c5-a145-4df1-a99d-14463ff5d048"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.706593 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a10198c5-a145-4df1-a99d-14463ff5d048-scripts" (OuterVolumeSpecName: "scripts") pod "a10198c5-a145-4df1-a99d-14463ff5d048" (UID: "a10198c5-a145-4df1-a99d-14463ff5d048"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.738446 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.754750 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a10198c5-a145-4df1-a99d-14463ff5d048-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a10198c5-a145-4df1-a99d-14463ff5d048" (UID: "a10198c5-a145-4df1-a99d-14463ff5d048"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.760684 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.771520 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Nov 24 01:30:00 crc kubenswrapper[4755]: E1124 01:30:00.772068 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d5b1659-28dc-49a7-9b79-e04b9e30f0f6" containerName="glance-httpd" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.772107 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d5b1659-28dc-49a7-9b79-e04b9e30f0f6" containerName="glance-httpd" Nov 24 01:30:00 crc kubenswrapper[4755]: E1124 01:30:00.772122 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d5b1659-28dc-49a7-9b79-e04b9e30f0f6" containerName="glance-log" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.772129 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d5b1659-28dc-49a7-9b79-e04b9e30f0f6" containerName="glance-log" Nov 24 01:30:00 crc kubenswrapper[4755]: E1124 01:30:00.772147 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a10198c5-a145-4df1-a99d-14463ff5d048" containerName="glance-httpd" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.772154 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="a10198c5-a145-4df1-a99d-14463ff5d048" containerName="glance-httpd" Nov 24 01:30:00 crc kubenswrapper[4755]: E1124 01:30:00.772192 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a10198c5-a145-4df1-a99d-14463ff5d048" containerName="glance-log" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.772199 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="a10198c5-a145-4df1-a99d-14463ff5d048" containerName="glance-log" Nov 24 01:30:00 crc kubenswrapper[4755]: E1124 01:30:00.772209 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e60b8f75-376b-4ba1-9b41-5f334cec157f" containerName="mariadb-account-create" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.772215 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="e60b8f75-376b-4ba1-9b41-5f334cec157f" containerName="mariadb-account-create" Nov 24 01:30:00 crc kubenswrapper[4755]: E1124 01:30:00.772231 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87ee57db-6666-4ced-b558-accfa958ce55" containerName="mariadb-database-create" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.772238 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="87ee57db-6666-4ced-b558-accfa958ce55" containerName="mariadb-database-create" Nov 24 01:30:00 crc kubenswrapper[4755]: E1124 01:30:00.772263 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddadc31c-aad5-4896-84da-4fbe82710d53" containerName="mariadb-database-create" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.772269 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddadc31c-aad5-4896-84da-4fbe82710d53" containerName="mariadb-database-create" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.772524 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="87ee57db-6666-4ced-b558-accfa958ce55" containerName="mariadb-database-create" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.772545 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d5b1659-28dc-49a7-9b79-e04b9e30f0f6" 
containerName="glance-httpd" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.772559 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="ddadc31c-aad5-4896-84da-4fbe82710d53" containerName="mariadb-database-create" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.772587 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d5b1659-28dc-49a7-9b79-e04b9e30f0f6" containerName="glance-log" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.772674 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="e60b8f75-376b-4ba1-9b41-5f334cec157f" containerName="mariadb-account-create" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.772715 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="a10198c5-a145-4df1-a99d-14463ff5d048" containerName="glance-httpd" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.772732 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="a10198c5-a145-4df1-a99d-14463ff5d048" containerName="glance-log" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.789864 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a10198c5-a145-4df1-a99d-14463ff5d048-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "a10198c5-a145-4df1-a99d-14463ff5d048" (UID: "a10198c5-a145-4df1-a99d-14463ff5d048"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.797125 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.797543 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a10198c5-a145-4df1-a99d-14463ff5d048-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.797669 4755 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a10198c5-a145-4df1-a99d-14463ff5d048-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.798489 4755 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a10198c5-a145-4df1-a99d-14463ff5d048-logs\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.800267 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k9q5t\" (UniqueName: \"kubernetes.io/projected/a10198c5-a145-4df1-a99d-14463ff5d048-kube-api-access-k9q5t\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.800842 4755 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.800939 4755 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a10198c5-a145-4df1-a99d-14463ff5d048-httpd-run\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.801038 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a10198c5-a145-4df1-a99d-14463ff5d048-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:00 crc 
kubenswrapper[4755]: I1124 01:30:00.800310 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.800354 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.817697 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a10198c5-a145-4df1-a99d-14463ff5d048-config-data" (OuterVolumeSpecName: "config-data") pod "a10198c5-a145-4df1-a99d-14463ff5d048" (UID: "a10198c5-a145-4df1-a99d-14463ff5d048"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.836343 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.868001 4755 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.901087 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29399130-98xxs"] Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.902225 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"d19dab7a-f075-4b26-a45f-1542a445a8a6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.902315 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d19dab7a-f075-4b26-a45f-1542a445a8a6-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"d19dab7a-f075-4b26-a45f-1542a445a8a6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.902340 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d19dab7a-f075-4b26-a45f-1542a445a8a6-logs\") pod \"glance-default-external-api-0\" (UID: \"d19dab7a-f075-4b26-a45f-1542a445a8a6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.902381 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d19dab7a-f075-4b26-a45f-1542a445a8a6-config-data\") pod \"glance-default-external-api-0\" (UID: \"d19dab7a-f075-4b26-a45f-1542a445a8a6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.902413 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2nfvx\" (UniqueName: \"kubernetes.io/projected/d19dab7a-f075-4b26-a45f-1542a445a8a6-kube-api-access-2nfvx\") pod \"glance-default-external-api-0\" (UID: \"d19dab7a-f075-4b26-a45f-1542a445a8a6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.902432 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/d19dab7a-f075-4b26-a45f-1542a445a8a6-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"d19dab7a-f075-4b26-a45f-1542a445a8a6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.902479 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d19dab7a-f075-4b26-a45f-1542a445a8a6-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"d19dab7a-f075-4b26-a45f-1542a445a8a6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.902501 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d19dab7a-f075-4b26-a45f-1542a445a8a6-scripts\") pod \"glance-default-external-api-0\" (UID: \"d19dab7a-f075-4b26-a45f-1542a445a8a6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.902564 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a10198c5-a145-4df1-a99d-14463ff5d048-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:00 crc kubenswrapper[4755]: I1124 01:30:00.902578 4755 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.007932 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d19dab7a-f075-4b26-a45f-1542a445a8a6-config-data\") pod \"glance-default-external-api-0\" (UID: \"d19dab7a-f075-4b26-a45f-1542a445a8a6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.008129 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2nfvx\" (UniqueName: \"kubernetes.io/projected/d19dab7a-f075-4b26-a45f-1542a445a8a6-kube-api-access-2nfvx\") pod \"glance-default-external-api-0\" (UID: \"d19dab7a-f075-4b26-a45f-1542a445a8a6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.008246 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d19dab7a-f075-4b26-a45f-1542a445a8a6-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"d19dab7a-f075-4b26-a45f-1542a445a8a6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.008357 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d19dab7a-f075-4b26-a45f-1542a445a8a6-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"d19dab7a-f075-4b26-a45f-1542a445a8a6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.008471 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d19dab7a-f075-4b26-a45f-1542a445a8a6-scripts\") pod \"glance-default-external-api-0\" (UID: \"d19dab7a-f075-4b26-a45f-1542a445a8a6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.008629 4755 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"d19dab7a-f075-4b26-a45f-1542a445a8a6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.008773 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d19dab7a-f075-4b26-a45f-1542a445a8a6-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"d19dab7a-f075-4b26-a45f-1542a445a8a6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.008871 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d19dab7a-f075-4b26-a45f-1542a445a8a6-logs\") pod \"glance-default-external-api-0\" (UID: \"d19dab7a-f075-4b26-a45f-1542a445a8a6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.009658 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d19dab7a-f075-4b26-a45f-1542a445a8a6-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"d19dab7a-f075-4b26-a45f-1542a445a8a6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.009673 4755 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"d19dab7a-f075-4b26-a45f-1542a445a8a6\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/glance-default-external-api-0" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.009828 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d19dab7a-f075-4b26-a45f-1542a445a8a6-logs\") pod \"glance-default-external-api-0\" (UID: \"d19dab7a-f075-4b26-a45f-1542a445a8a6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.012805 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d19dab7a-f075-4b26-a45f-1542a445a8a6-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"d19dab7a-f075-4b26-a45f-1542a445a8a6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.012901 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d19dab7a-f075-4b26-a45f-1542a445a8a6-config-data\") pod \"glance-default-external-api-0\" (UID: \"d19dab7a-f075-4b26-a45f-1542a445a8a6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.016796 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d19dab7a-f075-4b26-a45f-1542a445a8a6-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"d19dab7a-f075-4b26-a45f-1542a445a8a6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.025472 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/d19dab7a-f075-4b26-a45f-1542a445a8a6-scripts\") pod \"glance-default-external-api-0\" (UID: \"d19dab7a-f075-4b26-a45f-1542a445a8a6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.028153 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2nfvx\" (UniqueName: \"kubernetes.io/projected/d19dab7a-f075-4b26-a45f-1542a445a8a6-kube-api-access-2nfvx\") pod \"glance-default-external-api-0\" (UID: \"d19dab7a-f075-4b26-a45f-1542a445a8a6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.051232 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"d19dab7a-f075-4b26-a45f-1542a445a8a6\") " pod="openstack/glance-default-external-api-0" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.138853 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.420404 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29399130-98xxs" event={"ID":"f520ef1b-1a13-43c3-95cc-66a957b8e41f","Type":"ContainerStarted","Data":"9e7adb6d4c85d54897aa534f442bf3c8474b9fc060bc82fb29a3a99955b5af8d"} Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.420699 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29399130-98xxs" event={"ID":"f520ef1b-1a13-43c3-95cc-66a957b8e41f","Type":"ContainerStarted","Data":"f66e6386f1d8cf7459e198265081ffa7cb53080edd185b285a2b369f80263c7e"} Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.425982 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7a8a46b7-de82-4985-9b04-6287ed9d8f30","Type":"ContainerStarted","Data":"b495d6e2624befeaa77f3763146f089d43b4a72cc435952d093e4b9a42d90a56"} Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.428209 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"a10198c5-a145-4df1-a99d-14463ff5d048","Type":"ContainerDied","Data":"08d45d502b608d93ea437a15a21062307d4b751bbc64ca14cc07d87bdd0d2352"} Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.428333 4755 scope.go:117] "RemoveContainer" containerID="87d2ffcd285ff3e2495102203a6f65d2a346cefbab759e72ba2700e355a221a8" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.428587 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.447392 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29399130-98xxs" podStartSLOduration=1.447371126 podStartE2EDuration="1.447371126s" podCreationTimestamp="2025-11-24 01:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:30:01.436472795 +0000 UTC m=+1026.122538306" watchObservedRunningTime="2025-11-24 01:30:01.447371126 +0000 UTC m=+1026.133436627" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.465710 4755 scope.go:117] "RemoveContainer" containerID="4d843b7361a4e224478832ce3c5543fa05a489c9a8224647d78d6682f80906d0" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.494560 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.518828 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.531775 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.533256 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.538983 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.539120 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.559231 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 24 01:30:01 crc kubenswrapper[4755]: E1124 01:30:01.579006 4755 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda10198c5_a145_4df1_a99d_14463ff5d048.slice/crio-08d45d502b608d93ea437a15a21062307d4b751bbc64ca14cc07d87bdd0d2352\": RecentStats: unable to find data in memory cache]" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.723214 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"3658a90c-83fd-4a8e-9d15-c1b2cac647f1\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.723697 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3658a90c-83fd-4a8e-9d15-c1b2cac647f1-config-data\") pod \"glance-default-internal-api-0\" (UID: \"3658a90c-83fd-4a8e-9d15-c1b2cac647f1\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.723730 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3658a90c-83fd-4a8e-9d15-c1b2cac647f1-internal-tls-certs\") pod 
\"glance-default-internal-api-0\" (UID: \"3658a90c-83fd-4a8e-9d15-c1b2cac647f1\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.723760 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3658a90c-83fd-4a8e-9d15-c1b2cac647f1-logs\") pod \"glance-default-internal-api-0\" (UID: \"3658a90c-83fd-4a8e-9d15-c1b2cac647f1\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.723790 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3658a90c-83fd-4a8e-9d15-c1b2cac647f1-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"3658a90c-83fd-4a8e-9d15-c1b2cac647f1\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.723830 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3658a90c-83fd-4a8e-9d15-c1b2cac647f1-scripts\") pod \"glance-default-internal-api-0\" (UID: \"3658a90c-83fd-4a8e-9d15-c1b2cac647f1\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.723890 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ct8fc\" (UniqueName: \"kubernetes.io/projected/3658a90c-83fd-4a8e-9d15-c1b2cac647f1-kube-api-access-ct8fc\") pod \"glance-default-internal-api-0\" (UID: \"3658a90c-83fd-4a8e-9d15-c1b2cac647f1\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.723922 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3658a90c-83fd-4a8e-9d15-c1b2cac647f1-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"3658a90c-83fd-4a8e-9d15-c1b2cac647f1\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.729228 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.825532 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3658a90c-83fd-4a8e-9d15-c1b2cac647f1-scripts\") pod \"glance-default-internal-api-0\" (UID: \"3658a90c-83fd-4a8e-9d15-c1b2cac647f1\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.825599 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ct8fc\" (UniqueName: \"kubernetes.io/projected/3658a90c-83fd-4a8e-9d15-c1b2cac647f1-kube-api-access-ct8fc\") pod \"glance-default-internal-api-0\" (UID: \"3658a90c-83fd-4a8e-9d15-c1b2cac647f1\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.825681 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3658a90c-83fd-4a8e-9d15-c1b2cac647f1-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"3658a90c-83fd-4a8e-9d15-c1b2cac647f1\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.825714 
4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"3658a90c-83fd-4a8e-9d15-c1b2cac647f1\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.825816 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3658a90c-83fd-4a8e-9d15-c1b2cac647f1-config-data\") pod \"glance-default-internal-api-0\" (UID: \"3658a90c-83fd-4a8e-9d15-c1b2cac647f1\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.825834 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3658a90c-83fd-4a8e-9d15-c1b2cac647f1-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"3658a90c-83fd-4a8e-9d15-c1b2cac647f1\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.825856 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3658a90c-83fd-4a8e-9d15-c1b2cac647f1-logs\") pod \"glance-default-internal-api-0\" (UID: \"3658a90c-83fd-4a8e-9d15-c1b2cac647f1\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.825878 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3658a90c-83fd-4a8e-9d15-c1b2cac647f1-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"3658a90c-83fd-4a8e-9d15-c1b2cac647f1\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.826218 4755 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"3658a90c-83fd-4a8e-9d15-c1b2cac647f1\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/glance-default-internal-api-0" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.826311 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3658a90c-83fd-4a8e-9d15-c1b2cac647f1-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"3658a90c-83fd-4a8e-9d15-c1b2cac647f1\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.826525 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3658a90c-83fd-4a8e-9d15-c1b2cac647f1-logs\") pod \"glance-default-internal-api-0\" (UID: \"3658a90c-83fd-4a8e-9d15-c1b2cac647f1\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.830952 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3658a90c-83fd-4a8e-9d15-c1b2cac647f1-config-data\") pod \"glance-default-internal-api-0\" (UID: \"3658a90c-83fd-4a8e-9d15-c1b2cac647f1\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.831753 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/3658a90c-83fd-4a8e-9d15-c1b2cac647f1-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"3658a90c-83fd-4a8e-9d15-c1b2cac647f1\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.833075 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3658a90c-83fd-4a8e-9d15-c1b2cac647f1-scripts\") pod \"glance-default-internal-api-0\" (UID: \"3658a90c-83fd-4a8e-9d15-c1b2cac647f1\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.838264 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3658a90c-83fd-4a8e-9d15-c1b2cac647f1-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"3658a90c-83fd-4a8e-9d15-c1b2cac647f1\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.851363 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ct8fc\" (UniqueName: \"kubernetes.io/projected/3658a90c-83fd-4a8e-9d15-c1b2cac647f1-kube-api-access-ct8fc\") pod \"glance-default-internal-api-0\" (UID: \"3658a90c-83fd-4a8e-9d15-c1b2cac647f1\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:30:01 crc kubenswrapper[4755]: I1124 01:30:01.865155 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"3658a90c-83fd-4a8e-9d15-c1b2cac647f1\") " pod="openstack/glance-default-internal-api-0" Nov 24 01:30:02 crc kubenswrapper[4755]: I1124 01:30:02.016951 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d5b1659-28dc-49a7-9b79-e04b9e30f0f6" path="/var/lib/kubelet/pods/0d5b1659-28dc-49a7-9b79-e04b9e30f0f6/volumes" Nov 24 01:30:02 crc kubenswrapper[4755]: I1124 01:30:02.017802 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a10198c5-a145-4df1-a99d-14463ff5d048" path="/var/lib/kubelet/pods/a10198c5-a145-4df1-a99d-14463ff5d048/volumes" Nov 24 01:30:02 crc kubenswrapper[4755]: I1124 01:30:02.164174 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 24 01:30:02 crc kubenswrapper[4755]: I1124 01:30:02.451897 4755 generic.go:334] "Generic (PLEG): container finished" podID="f520ef1b-1a13-43c3-95cc-66a957b8e41f" containerID="9e7adb6d4c85d54897aa534f442bf3c8474b9fc060bc82fb29a3a99955b5af8d" exitCode=0 Nov 24 01:30:02 crc kubenswrapper[4755]: I1124 01:30:02.452083 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29399130-98xxs" event={"ID":"f520ef1b-1a13-43c3-95cc-66a957b8e41f","Type":"ContainerDied","Data":"9e7adb6d4c85d54897aa534f442bf3c8474b9fc060bc82fb29a3a99955b5af8d"} Nov 24 01:30:02 crc kubenswrapper[4755]: I1124 01:30:02.459345 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7a8a46b7-de82-4985-9b04-6287ed9d8f30","Type":"ContainerStarted","Data":"ebfc187fbf3b83dc965361a5aa6f08d20fb73a142801e234b6b252e8da6ec585"} Nov 24 01:30:02 crc kubenswrapper[4755]: I1124 01:30:02.459502 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7a8a46b7-de82-4985-9b04-6287ed9d8f30" containerName="ceilometer-central-agent" containerID="cri-o://521bc80ec427df057b5e0e414d7cec75a2344f7f05cf577494f18b0979ca2f3e" gracePeriod=30 Nov 24 01:30:02 crc kubenswrapper[4755]: I1124 01:30:02.459786 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 24 01:30:02 crc kubenswrapper[4755]: I1124 01:30:02.459846 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7a8a46b7-de82-4985-9b04-6287ed9d8f30" containerName="proxy-httpd" containerID="cri-o://ebfc187fbf3b83dc965361a5aa6f08d20fb73a142801e234b6b252e8da6ec585" gracePeriod=30 Nov 24 01:30:02 crc kubenswrapper[4755]: I1124 01:30:02.459904 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7a8a46b7-de82-4985-9b04-6287ed9d8f30" containerName="sg-core" containerID="cri-o://b495d6e2624befeaa77f3763146f089d43b4a72cc435952d093e4b9a42d90a56" gracePeriod=30 Nov 24 01:30:02 crc kubenswrapper[4755]: I1124 01:30:02.459941 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7a8a46b7-de82-4985-9b04-6287ed9d8f30" containerName="ceilometer-notification-agent" containerID="cri-o://5a01d0bc538f5c8fed0e8cefc2b624a998bfba13b69b195b454568ed2ee77308" gracePeriod=30 Nov 24 01:30:02 crc kubenswrapper[4755]: I1124 01:30:02.471764 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d19dab7a-f075-4b26-a45f-1542a445a8a6","Type":"ContainerStarted","Data":"bf1d8ee44fbdc78f03fd3df63803b963e97080997091644d950db9a164bee4e2"} Nov 24 01:30:02 crc kubenswrapper[4755]: I1124 01:30:02.732402 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.6046107 podStartE2EDuration="6.732379321s" podCreationTimestamp="2025-11-24 01:29:56 +0000 UTC" firstStartedPulling="2025-11-24 01:29:57.470729155 +0000 UTC m=+1022.156794656" lastFinishedPulling="2025-11-24 01:30:01.598497776 +0000 UTC m=+1026.284563277" observedRunningTime="2025-11-24 01:30:02.496917454 +0000 UTC m=+1027.182982955" watchObservedRunningTime="2025-11-24 01:30:02.732379321 +0000 UTC m=+1027.418444922" Nov 24 01:30:02 crc kubenswrapper[4755]: I1124 01:30:02.735880 4755 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openstack/glance-default-internal-api-0"] Nov 24 01:30:02 crc kubenswrapper[4755]: W1124 01:30:02.743933 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3658a90c_83fd_4a8e_9d15_c1b2cac647f1.slice/crio-8ebc660cb118652dc1718eb1b5ad245b11fb1b1b0559a261ae99fd36ff1c766b WatchSource:0}: Error finding container 8ebc660cb118652dc1718eb1b5ad245b11fb1b1b0559a261ae99fd36ff1c766b: Status 404 returned error can't find the container with id 8ebc660cb118652dc1718eb1b5ad245b11fb1b1b0559a261ae99fd36ff1c766b Nov 24 01:30:02 crc kubenswrapper[4755]: I1124 01:30:02.888330 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-55cf755d8-2cns2" Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.048081 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc7447cb-d4c3-48d1-8cd3-065d5eee21cb-logs\") pod \"dc7447cb-d4c3-48d1-8cd3-065d5eee21cb\" (UID: \"dc7447cb-d4c3-48d1-8cd3-065d5eee21cb\") " Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.048541 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/dc7447cb-d4c3-48d1-8cd3-065d5eee21cb-config-data\") pod \"dc7447cb-d4c3-48d1-8cd3-065d5eee21cb\" (UID: \"dc7447cb-d4c3-48d1-8cd3-065d5eee21cb\") " Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.048588 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc7447cb-d4c3-48d1-8cd3-065d5eee21cb-horizon-tls-certs\") pod \"dc7447cb-d4c3-48d1-8cd3-065d5eee21cb\" (UID: \"dc7447cb-d4c3-48d1-8cd3-065d5eee21cb\") " Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.048678 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc7447cb-d4c3-48d1-8cd3-065d5eee21cb-combined-ca-bundle\") pod \"dc7447cb-d4c3-48d1-8cd3-065d5eee21cb\" (UID: \"dc7447cb-d4c3-48d1-8cd3-065d5eee21cb\") " Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.048729 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lnrmt\" (UniqueName: \"kubernetes.io/projected/dc7447cb-d4c3-48d1-8cd3-065d5eee21cb-kube-api-access-lnrmt\") pod \"dc7447cb-d4c3-48d1-8cd3-065d5eee21cb\" (UID: \"dc7447cb-d4c3-48d1-8cd3-065d5eee21cb\") " Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.048804 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/dc7447cb-d4c3-48d1-8cd3-065d5eee21cb-scripts\") pod \"dc7447cb-d4c3-48d1-8cd3-065d5eee21cb\" (UID: \"dc7447cb-d4c3-48d1-8cd3-065d5eee21cb\") " Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.048933 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc7447cb-d4c3-48d1-8cd3-065d5eee21cb-logs" (OuterVolumeSpecName: "logs") pod "dc7447cb-d4c3-48d1-8cd3-065d5eee21cb" (UID: "dc7447cb-d4c3-48d1-8cd3-065d5eee21cb"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.048974 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/dc7447cb-d4c3-48d1-8cd3-065d5eee21cb-horizon-secret-key\") pod \"dc7447cb-d4c3-48d1-8cd3-065d5eee21cb\" (UID: \"dc7447cb-d4c3-48d1-8cd3-065d5eee21cb\") " Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.049522 4755 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc7447cb-d4c3-48d1-8cd3-065d5eee21cb-logs\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.054544 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc7447cb-d4c3-48d1-8cd3-065d5eee21cb-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "dc7447cb-d4c3-48d1-8cd3-065d5eee21cb" (UID: "dc7447cb-d4c3-48d1-8cd3-065d5eee21cb"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.070950 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc7447cb-d4c3-48d1-8cd3-065d5eee21cb-kube-api-access-lnrmt" (OuterVolumeSpecName: "kube-api-access-lnrmt") pod "dc7447cb-d4c3-48d1-8cd3-065d5eee21cb" (UID: "dc7447cb-d4c3-48d1-8cd3-065d5eee21cb"). InnerVolumeSpecName "kube-api-access-lnrmt". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.084440 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dc7447cb-d4c3-48d1-8cd3-065d5eee21cb-config-data" (OuterVolumeSpecName: "config-data") pod "dc7447cb-d4c3-48d1-8cd3-065d5eee21cb" (UID: "dc7447cb-d4c3-48d1-8cd3-065d5eee21cb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.096074 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dc7447cb-d4c3-48d1-8cd3-065d5eee21cb-scripts" (OuterVolumeSpecName: "scripts") pod "dc7447cb-d4c3-48d1-8cd3-065d5eee21cb" (UID: "dc7447cb-d4c3-48d1-8cd3-065d5eee21cb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.099834 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc7447cb-d4c3-48d1-8cd3-065d5eee21cb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dc7447cb-d4c3-48d1-8cd3-065d5eee21cb" (UID: "dc7447cb-d4c3-48d1-8cd3-065d5eee21cb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.127055 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc7447cb-d4c3-48d1-8cd3-065d5eee21cb-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "dc7447cb-d4c3-48d1-8cd3-065d5eee21cb" (UID: "dc7447cb-d4c3-48d1-8cd3-065d5eee21cb"). InnerVolumeSpecName "horizon-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.151185 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/dc7447cb-d4c3-48d1-8cd3-065d5eee21cb-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.151218 4755 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc7447cb-d4c3-48d1-8cd3-065d5eee21cb-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.151228 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc7447cb-d4c3-48d1-8cd3-065d5eee21cb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.151239 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lnrmt\" (UniqueName: \"kubernetes.io/projected/dc7447cb-d4c3-48d1-8cd3-065d5eee21cb-kube-api-access-lnrmt\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.151247 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/dc7447cb-d4c3-48d1-8cd3-065d5eee21cb-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.151255 4755 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/dc7447cb-d4c3-48d1-8cd3-065d5eee21cb-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.487845 4755 generic.go:334] "Generic (PLEG): container finished" podID="7a8a46b7-de82-4985-9b04-6287ed9d8f30" containerID="ebfc187fbf3b83dc965361a5aa6f08d20fb73a142801e234b6b252e8da6ec585" exitCode=0 Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.488200 4755 generic.go:334] "Generic (PLEG): container finished" podID="7a8a46b7-de82-4985-9b04-6287ed9d8f30" containerID="b495d6e2624befeaa77f3763146f089d43b4a72cc435952d093e4b9a42d90a56" exitCode=2 Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.488217 4755 generic.go:334] "Generic (PLEG): container finished" podID="7a8a46b7-de82-4985-9b04-6287ed9d8f30" containerID="5a01d0bc538f5c8fed0e8cefc2b624a998bfba13b69b195b454568ed2ee77308" exitCode=0 Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.487926 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7a8a46b7-de82-4985-9b04-6287ed9d8f30","Type":"ContainerDied","Data":"ebfc187fbf3b83dc965361a5aa6f08d20fb73a142801e234b6b252e8da6ec585"} Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.488276 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7a8a46b7-de82-4985-9b04-6287ed9d8f30","Type":"ContainerDied","Data":"b495d6e2624befeaa77f3763146f089d43b4a72cc435952d093e4b9a42d90a56"} Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.488298 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7a8a46b7-de82-4985-9b04-6287ed9d8f30","Type":"ContainerDied","Data":"5a01d0bc538f5c8fed0e8cefc2b624a998bfba13b69b195b454568ed2ee77308"} Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.490575 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" 
event={"ID":"d19dab7a-f075-4b26-a45f-1542a445a8a6","Type":"ContainerStarted","Data":"6d903ddbea7f3e4d9f3c34f0a8ae0cde5e035ffe036cedb8834142c8707a9922"} Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.490643 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d19dab7a-f075-4b26-a45f-1542a445a8a6","Type":"ContainerStarted","Data":"e9d912d8b6258b521198d1bf5a20dcca57c7a856a239b29edd7feb5ed5778a05"} Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.494563 4755 generic.go:334] "Generic (PLEG): container finished" podID="dc7447cb-d4c3-48d1-8cd3-065d5eee21cb" containerID="1d442e32d84d4e2cff3318b61b7cba05d68456587c4ceec4d872c6c06fa3bf99" exitCode=137 Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.494673 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-55cf755d8-2cns2" event={"ID":"dc7447cb-d4c3-48d1-8cd3-065d5eee21cb","Type":"ContainerDied","Data":"1d442e32d84d4e2cff3318b61b7cba05d68456587c4ceec4d872c6c06fa3bf99"} Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.494703 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-55cf755d8-2cns2" event={"ID":"dc7447cb-d4c3-48d1-8cd3-065d5eee21cb","Type":"ContainerDied","Data":"5e7e4e1884508e5dd72078cfd6c96f499ccf8a4efb96a0bdd0c0915bed4596bb"} Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.494724 4755 scope.go:117] "RemoveContainer" containerID="4beef73f15aa24bc366a07ee6b65668580a078af7defa6b5c6d228dcbe01790b" Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.494898 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-55cf755d8-2cns2" Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.511741 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"3658a90c-83fd-4a8e-9d15-c1b2cac647f1","Type":"ContainerStarted","Data":"57bca2eb084734bd4608d6cea886f6e6888f2277ee44e843b6914c7ce43ea03c"} Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.511805 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"3658a90c-83fd-4a8e-9d15-c1b2cac647f1","Type":"ContainerStarted","Data":"8ebc660cb118652dc1718eb1b5ad245b11fb1b1b0559a261ae99fd36ff1c766b"} Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.525941 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.525919096 podStartE2EDuration="3.525919096s" podCreationTimestamp="2025-11-24 01:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:30:03.510639895 +0000 UTC m=+1028.196705416" watchObservedRunningTime="2025-11-24 01:30:03.525919096 +0000 UTC m=+1028.211984597" Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.549763 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-55cf755d8-2cns2"] Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.556570 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-55cf755d8-2cns2"] Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.807829 4755 scope.go:117] "RemoveContainer" containerID="1d442e32d84d4e2cff3318b61b7cba05d68456587c4ceec4d872c6c06fa3bf99" Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.834472 4755 scope.go:117] "RemoveContainer" 
containerID="4beef73f15aa24bc366a07ee6b65668580a078af7defa6b5c6d228dcbe01790b" Nov 24 01:30:03 crc kubenswrapper[4755]: E1124 01:30:03.835202 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4beef73f15aa24bc366a07ee6b65668580a078af7defa6b5c6d228dcbe01790b\": container with ID starting with 4beef73f15aa24bc366a07ee6b65668580a078af7defa6b5c6d228dcbe01790b not found: ID does not exist" containerID="4beef73f15aa24bc366a07ee6b65668580a078af7defa6b5c6d228dcbe01790b" Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.835239 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4beef73f15aa24bc366a07ee6b65668580a078af7defa6b5c6d228dcbe01790b"} err="failed to get container status \"4beef73f15aa24bc366a07ee6b65668580a078af7defa6b5c6d228dcbe01790b\": rpc error: code = NotFound desc = could not find container \"4beef73f15aa24bc366a07ee6b65668580a078af7defa6b5c6d228dcbe01790b\": container with ID starting with 4beef73f15aa24bc366a07ee6b65668580a078af7defa6b5c6d228dcbe01790b not found: ID does not exist" Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.835264 4755 scope.go:117] "RemoveContainer" containerID="1d442e32d84d4e2cff3318b61b7cba05d68456587c4ceec4d872c6c06fa3bf99" Nov 24 01:30:03 crc kubenswrapper[4755]: E1124 01:30:03.835508 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d442e32d84d4e2cff3318b61b7cba05d68456587c4ceec4d872c6c06fa3bf99\": container with ID starting with 1d442e32d84d4e2cff3318b61b7cba05d68456587c4ceec4d872c6c06fa3bf99 not found: ID does not exist" containerID="1d442e32d84d4e2cff3318b61b7cba05d68456587c4ceec4d872c6c06fa3bf99" Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.835524 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d442e32d84d4e2cff3318b61b7cba05d68456587c4ceec4d872c6c06fa3bf99"} err="failed to get container status \"1d442e32d84d4e2cff3318b61b7cba05d68456587c4ceec4d872c6c06fa3bf99\": rpc error: code = NotFound desc = could not find container \"1d442e32d84d4e2cff3318b61b7cba05d68456587c4ceec4d872c6c06fa3bf99\": container with ID starting with 1d442e32d84d4e2cff3318b61b7cba05d68456587c4ceec4d872c6c06fa3bf99 not found: ID does not exist" Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.899374 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29399130-98xxs" Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.972963 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v6tt4\" (UniqueName: \"kubernetes.io/projected/f520ef1b-1a13-43c3-95cc-66a957b8e41f-kube-api-access-v6tt4\") pod \"f520ef1b-1a13-43c3-95cc-66a957b8e41f\" (UID: \"f520ef1b-1a13-43c3-95cc-66a957b8e41f\") " Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.973050 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f520ef1b-1a13-43c3-95cc-66a957b8e41f-secret-volume\") pod \"f520ef1b-1a13-43c3-95cc-66a957b8e41f\" (UID: \"f520ef1b-1a13-43c3-95cc-66a957b8e41f\") " Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.973137 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f520ef1b-1a13-43c3-95cc-66a957b8e41f-config-volume\") pod \"f520ef1b-1a13-43c3-95cc-66a957b8e41f\" (UID: \"f520ef1b-1a13-43c3-95cc-66a957b8e41f\") " Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.974181 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f520ef1b-1a13-43c3-95cc-66a957b8e41f-config-volume" (OuterVolumeSpecName: "config-volume") pod "f520ef1b-1a13-43c3-95cc-66a957b8e41f" (UID: "f520ef1b-1a13-43c3-95cc-66a957b8e41f"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.977828 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f520ef1b-1a13-43c3-95cc-66a957b8e41f-kube-api-access-v6tt4" (OuterVolumeSpecName: "kube-api-access-v6tt4") pod "f520ef1b-1a13-43c3-95cc-66a957b8e41f" (UID: "f520ef1b-1a13-43c3-95cc-66a957b8e41f"). InnerVolumeSpecName "kube-api-access-v6tt4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:30:03 crc kubenswrapper[4755]: I1124 01:30:03.978190 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f520ef1b-1a13-43c3-95cc-66a957b8e41f-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "f520ef1b-1a13-43c3-95cc-66a957b8e41f" (UID: "f520ef1b-1a13-43c3-95cc-66a957b8e41f"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:30:04 crc kubenswrapper[4755]: I1124 01:30:04.014683 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc7447cb-d4c3-48d1-8cd3-065d5eee21cb" path="/var/lib/kubelet/pods/dc7447cb-d4c3-48d1-8cd3-065d5eee21cb/volumes" Nov 24 01:30:04 crc kubenswrapper[4755]: I1124 01:30:04.075340 4755 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f520ef1b-1a13-43c3-95cc-66a957b8e41f-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:04 crc kubenswrapper[4755]: I1124 01:30:04.075370 4755 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f520ef1b-1a13-43c3-95cc-66a957b8e41f-config-volume\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:04 crc kubenswrapper[4755]: I1124 01:30:04.075380 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v6tt4\" (UniqueName: \"kubernetes.io/projected/f520ef1b-1a13-43c3-95cc-66a957b8e41f-kube-api-access-v6tt4\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:04 crc kubenswrapper[4755]: I1124 01:30:04.524735 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"3658a90c-83fd-4a8e-9d15-c1b2cac647f1","Type":"ContainerStarted","Data":"0ac3fb1d7f0a91f8c03495ee9e6f2faab162cc2d654ae46e9ca1e823fd8d83ac"} Nov 24 01:30:04 crc kubenswrapper[4755]: I1124 01:30:04.527839 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29399130-98xxs" event={"ID":"f520ef1b-1a13-43c3-95cc-66a957b8e41f","Type":"ContainerDied","Data":"f66e6386f1d8cf7459e198265081ffa7cb53080edd185b285a2b369f80263c7e"} Nov 24 01:30:04 crc kubenswrapper[4755]: I1124 01:30:04.527877 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f66e6386f1d8cf7459e198265081ffa7cb53080edd185b285a2b369f80263c7e" Nov 24 01:30:04 crc kubenswrapper[4755]: I1124 01:30:04.527884 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29399130-98xxs" Nov 24 01:30:04 crc kubenswrapper[4755]: I1124 01:30:04.547487 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.547465641 podStartE2EDuration="3.547465641s" podCreationTimestamp="2025-11-24 01:30:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:30:04.543001128 +0000 UTC m=+1029.229066629" watchObservedRunningTime="2025-11-24 01:30:04.547465641 +0000 UTC m=+1029.233531142" Nov 24 01:30:05 crc kubenswrapper[4755]: I1124 01:30:05.291559 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-hqx5z"] Nov 24 01:30:05 crc kubenswrapper[4755]: E1124 01:30:05.291928 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc7447cb-d4c3-48d1-8cd3-065d5eee21cb" containerName="horizon" Nov 24 01:30:05 crc kubenswrapper[4755]: I1124 01:30:05.291941 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc7447cb-d4c3-48d1-8cd3-065d5eee21cb" containerName="horizon" Nov 24 01:30:05 crc kubenswrapper[4755]: E1124 01:30:05.291975 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f520ef1b-1a13-43c3-95cc-66a957b8e41f" containerName="collect-profiles" Nov 24 01:30:05 crc kubenswrapper[4755]: I1124 01:30:05.291981 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="f520ef1b-1a13-43c3-95cc-66a957b8e41f" containerName="collect-profiles" Nov 24 01:30:05 crc kubenswrapper[4755]: E1124 01:30:05.291994 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc7447cb-d4c3-48d1-8cd3-065d5eee21cb" containerName="horizon-log" Nov 24 01:30:05 crc kubenswrapper[4755]: I1124 01:30:05.292002 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc7447cb-d4c3-48d1-8cd3-065d5eee21cb" containerName="horizon-log" Nov 24 01:30:05 crc kubenswrapper[4755]: I1124 01:30:05.292175 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc7447cb-d4c3-48d1-8cd3-065d5eee21cb" containerName="horizon-log" Nov 24 01:30:05 crc kubenswrapper[4755]: I1124 01:30:05.292188 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc7447cb-d4c3-48d1-8cd3-065d5eee21cb" containerName="horizon" Nov 24 01:30:05 crc kubenswrapper[4755]: I1124 01:30:05.292200 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="f520ef1b-1a13-43c3-95cc-66a957b8e41f" containerName="collect-profiles" Nov 24 01:30:05 crc kubenswrapper[4755]: I1124 01:30:05.292817 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-hqx5z" Nov 24 01:30:05 crc kubenswrapper[4755]: I1124 01:30:05.297186 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-t8zjp" Nov 24 01:30:05 crc kubenswrapper[4755]: I1124 01:30:05.298205 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Nov 24 01:30:05 crc kubenswrapper[4755]: I1124 01:30:05.300945 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Nov 24 01:30:05 crc kubenswrapper[4755]: I1124 01:30:05.305007 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-hqx5z"] Nov 24 01:30:05 crc kubenswrapper[4755]: I1124 01:30:05.397201 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8965375e-ebca-4829-8445-54dabd02845f-scripts\") pod \"nova-cell0-conductor-db-sync-hqx5z\" (UID: \"8965375e-ebca-4829-8445-54dabd02845f\") " pod="openstack/nova-cell0-conductor-db-sync-hqx5z" Nov 24 01:30:05 crc kubenswrapper[4755]: I1124 01:30:05.397282 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2nrk4\" (UniqueName: \"kubernetes.io/projected/8965375e-ebca-4829-8445-54dabd02845f-kube-api-access-2nrk4\") pod \"nova-cell0-conductor-db-sync-hqx5z\" (UID: \"8965375e-ebca-4829-8445-54dabd02845f\") " pod="openstack/nova-cell0-conductor-db-sync-hqx5z" Nov 24 01:30:05 crc kubenswrapper[4755]: I1124 01:30:05.397317 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8965375e-ebca-4829-8445-54dabd02845f-config-data\") pod \"nova-cell0-conductor-db-sync-hqx5z\" (UID: \"8965375e-ebca-4829-8445-54dabd02845f\") " pod="openstack/nova-cell0-conductor-db-sync-hqx5z" Nov 24 01:30:05 crc kubenswrapper[4755]: I1124 01:30:05.397437 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8965375e-ebca-4829-8445-54dabd02845f-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-hqx5z\" (UID: \"8965375e-ebca-4829-8445-54dabd02845f\") " pod="openstack/nova-cell0-conductor-db-sync-hqx5z" Nov 24 01:30:05 crc kubenswrapper[4755]: I1124 01:30:05.499039 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8965375e-ebca-4829-8445-54dabd02845f-scripts\") pod \"nova-cell0-conductor-db-sync-hqx5z\" (UID: \"8965375e-ebca-4829-8445-54dabd02845f\") " pod="openstack/nova-cell0-conductor-db-sync-hqx5z" Nov 24 01:30:05 crc kubenswrapper[4755]: I1124 01:30:05.499165 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2nrk4\" (UniqueName: \"kubernetes.io/projected/8965375e-ebca-4829-8445-54dabd02845f-kube-api-access-2nrk4\") pod \"nova-cell0-conductor-db-sync-hqx5z\" (UID: \"8965375e-ebca-4829-8445-54dabd02845f\") " pod="openstack/nova-cell0-conductor-db-sync-hqx5z" Nov 24 01:30:05 crc kubenswrapper[4755]: I1124 01:30:05.499216 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8965375e-ebca-4829-8445-54dabd02845f-config-data\") pod \"nova-cell0-conductor-db-sync-hqx5z\" (UID: 
\"8965375e-ebca-4829-8445-54dabd02845f\") " pod="openstack/nova-cell0-conductor-db-sync-hqx5z" Nov 24 01:30:05 crc kubenswrapper[4755]: I1124 01:30:05.499261 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8965375e-ebca-4829-8445-54dabd02845f-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-hqx5z\" (UID: \"8965375e-ebca-4829-8445-54dabd02845f\") " pod="openstack/nova-cell0-conductor-db-sync-hqx5z" Nov 24 01:30:05 crc kubenswrapper[4755]: I1124 01:30:05.506165 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8965375e-ebca-4829-8445-54dabd02845f-scripts\") pod \"nova-cell0-conductor-db-sync-hqx5z\" (UID: \"8965375e-ebca-4829-8445-54dabd02845f\") " pod="openstack/nova-cell0-conductor-db-sync-hqx5z" Nov 24 01:30:05 crc kubenswrapper[4755]: I1124 01:30:05.506429 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8965375e-ebca-4829-8445-54dabd02845f-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-hqx5z\" (UID: \"8965375e-ebca-4829-8445-54dabd02845f\") " pod="openstack/nova-cell0-conductor-db-sync-hqx5z" Nov 24 01:30:05 crc kubenswrapper[4755]: I1124 01:30:05.517995 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8965375e-ebca-4829-8445-54dabd02845f-config-data\") pod \"nova-cell0-conductor-db-sync-hqx5z\" (UID: \"8965375e-ebca-4829-8445-54dabd02845f\") " pod="openstack/nova-cell0-conductor-db-sync-hqx5z" Nov 24 01:30:05 crc kubenswrapper[4755]: I1124 01:30:05.520106 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2nrk4\" (UniqueName: \"kubernetes.io/projected/8965375e-ebca-4829-8445-54dabd02845f-kube-api-access-2nrk4\") pod \"nova-cell0-conductor-db-sync-hqx5z\" (UID: \"8965375e-ebca-4829-8445-54dabd02845f\") " pod="openstack/nova-cell0-conductor-db-sync-hqx5z" Nov 24 01:30:05 crc kubenswrapper[4755]: I1124 01:30:05.612353 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-hqx5z" Nov 24 01:30:06 crc kubenswrapper[4755]: I1124 01:30:06.114597 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-hqx5z"] Nov 24 01:30:06 crc kubenswrapper[4755]: W1124 01:30:06.123482 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8965375e_ebca_4829_8445_54dabd02845f.slice/crio-31c22c7941a4c75e9bd953e5fd209c1c196d7f5973f4eeabe42dffe31d796923 WatchSource:0}: Error finding container 31c22c7941a4c75e9bd953e5fd209c1c196d7f5973f4eeabe42dffe31d796923: Status 404 returned error can't find the container with id 31c22c7941a4c75e9bd953e5fd209c1c196d7f5973f4eeabe42dffe31d796923 Nov 24 01:30:06 crc kubenswrapper[4755]: I1124 01:30:06.547030 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-hqx5z" event={"ID":"8965375e-ebca-4829-8445-54dabd02845f","Type":"ContainerStarted","Data":"31c22c7941a4c75e9bd953e5fd209c1c196d7f5973f4eeabe42dffe31d796923"} Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.517686 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.573894 4755 generic.go:334] "Generic (PLEG): container finished" podID="7a8a46b7-de82-4985-9b04-6287ed9d8f30" containerID="521bc80ec427df057b5e0e414d7cec75a2344f7f05cf577494f18b0979ca2f3e" exitCode=0 Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.573946 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7a8a46b7-de82-4985-9b04-6287ed9d8f30","Type":"ContainerDied","Data":"521bc80ec427df057b5e0e414d7cec75a2344f7f05cf577494f18b0979ca2f3e"} Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.573978 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7a8a46b7-de82-4985-9b04-6287ed9d8f30","Type":"ContainerDied","Data":"33e7aeeaf709025e9fd12cf2d8c316890d7ae2ded5280523cc14f2ede1ac5246"} Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.573999 4755 scope.go:117] "RemoveContainer" containerID="ebfc187fbf3b83dc965361a5aa6f08d20fb73a142801e234b6b252e8da6ec585" Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.574158 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.599229 4755 scope.go:117] "RemoveContainer" containerID="b495d6e2624befeaa77f3763146f089d43b4a72cc435952d093e4b9a42d90a56" Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.617970 4755 scope.go:117] "RemoveContainer" containerID="5a01d0bc538f5c8fed0e8cefc2b624a998bfba13b69b195b454568ed2ee77308" Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.638243 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7a8a46b7-de82-4985-9b04-6287ed9d8f30-log-httpd\") pod \"7a8a46b7-de82-4985-9b04-6287ed9d8f30\" (UID: \"7a8a46b7-de82-4985-9b04-6287ed9d8f30\") " Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.638392 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a8a46b7-de82-4985-9b04-6287ed9d8f30-config-data\") pod \"7a8a46b7-de82-4985-9b04-6287ed9d8f30\" (UID: \"7a8a46b7-de82-4985-9b04-6287ed9d8f30\") " Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.638430 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7a8a46b7-de82-4985-9b04-6287ed9d8f30-scripts\") pod \"7a8a46b7-de82-4985-9b04-6287ed9d8f30\" (UID: \"7a8a46b7-de82-4985-9b04-6287ed9d8f30\") " Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.638452 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a8a46b7-de82-4985-9b04-6287ed9d8f30-combined-ca-bundle\") pod \"7a8a46b7-de82-4985-9b04-6287ed9d8f30\" (UID: \"7a8a46b7-de82-4985-9b04-6287ed9d8f30\") " Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.638531 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7a8a46b7-de82-4985-9b04-6287ed9d8f30-run-httpd\") pod \"7a8a46b7-de82-4985-9b04-6287ed9d8f30\" (UID: \"7a8a46b7-de82-4985-9b04-6287ed9d8f30\") " Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.638564 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/7a8a46b7-de82-4985-9b04-6287ed9d8f30-sg-core-conf-yaml\") pod \"7a8a46b7-de82-4985-9b04-6287ed9d8f30\" (UID: \"7a8a46b7-de82-4985-9b04-6287ed9d8f30\") " Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.638584 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nkl87\" (UniqueName: \"kubernetes.io/projected/7a8a46b7-de82-4985-9b04-6287ed9d8f30-kube-api-access-nkl87\") pod \"7a8a46b7-de82-4985-9b04-6287ed9d8f30\" (UID: \"7a8a46b7-de82-4985-9b04-6287ed9d8f30\") " Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.640066 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7a8a46b7-de82-4985-9b04-6287ed9d8f30-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "7a8a46b7-de82-4985-9b04-6287ed9d8f30" (UID: "7a8a46b7-de82-4985-9b04-6287ed9d8f30"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.640336 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7a8a46b7-de82-4985-9b04-6287ed9d8f30-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "7a8a46b7-de82-4985-9b04-6287ed9d8f30" (UID: "7a8a46b7-de82-4985-9b04-6287ed9d8f30"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.642921 4755 scope.go:117] "RemoveContainer" containerID="521bc80ec427df057b5e0e414d7cec75a2344f7f05cf577494f18b0979ca2f3e" Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.651030 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a8a46b7-de82-4985-9b04-6287ed9d8f30-kube-api-access-nkl87" (OuterVolumeSpecName: "kube-api-access-nkl87") pod "7a8a46b7-de82-4985-9b04-6287ed9d8f30" (UID: "7a8a46b7-de82-4985-9b04-6287ed9d8f30"). InnerVolumeSpecName "kube-api-access-nkl87". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.664683 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a8a46b7-de82-4985-9b04-6287ed9d8f30-scripts" (OuterVolumeSpecName: "scripts") pod "7a8a46b7-de82-4985-9b04-6287ed9d8f30" (UID: "7a8a46b7-de82-4985-9b04-6287ed9d8f30"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.672850 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a8a46b7-de82-4985-9b04-6287ed9d8f30-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "7a8a46b7-de82-4985-9b04-6287ed9d8f30" (UID: "7a8a46b7-de82-4985-9b04-6287ed9d8f30"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.725726 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a8a46b7-de82-4985-9b04-6287ed9d8f30-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7a8a46b7-de82-4985-9b04-6287ed9d8f30" (UID: "7a8a46b7-de82-4985-9b04-6287ed9d8f30"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.738872 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a8a46b7-de82-4985-9b04-6287ed9d8f30-config-data" (OuterVolumeSpecName: "config-data") pod "7a8a46b7-de82-4985-9b04-6287ed9d8f30" (UID: "7a8a46b7-de82-4985-9b04-6287ed9d8f30"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.740968 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a8a46b7-de82-4985-9b04-6287ed9d8f30-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.741001 4755 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7a8a46b7-de82-4985-9b04-6287ed9d8f30-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.741010 4755 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7a8a46b7-de82-4985-9b04-6287ed9d8f30-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.741019 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nkl87\" (UniqueName: \"kubernetes.io/projected/7a8a46b7-de82-4985-9b04-6287ed9d8f30-kube-api-access-nkl87\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.741028 4755 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7a8a46b7-de82-4985-9b04-6287ed9d8f30-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.741036 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a8a46b7-de82-4985-9b04-6287ed9d8f30-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.741057 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7a8a46b7-de82-4985-9b04-6287ed9d8f30-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.755651 4755 scope.go:117] "RemoveContainer" containerID="ebfc187fbf3b83dc965361a5aa6f08d20fb73a142801e234b6b252e8da6ec585" Nov 24 01:30:07 crc kubenswrapper[4755]: E1124 01:30:07.756112 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ebfc187fbf3b83dc965361a5aa6f08d20fb73a142801e234b6b252e8da6ec585\": container with ID starting with ebfc187fbf3b83dc965361a5aa6f08d20fb73a142801e234b6b252e8da6ec585 not found: ID does not exist" containerID="ebfc187fbf3b83dc965361a5aa6f08d20fb73a142801e234b6b252e8da6ec585" Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.756150 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ebfc187fbf3b83dc965361a5aa6f08d20fb73a142801e234b6b252e8da6ec585"} err="failed to get container status \"ebfc187fbf3b83dc965361a5aa6f08d20fb73a142801e234b6b252e8da6ec585\": rpc error: code = NotFound desc = could not find container \"ebfc187fbf3b83dc965361a5aa6f08d20fb73a142801e234b6b252e8da6ec585\": container with ID starting with ebfc187fbf3b83dc965361a5aa6f08d20fb73a142801e234b6b252e8da6ec585 not found: ID 
does not exist" Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.756175 4755 scope.go:117] "RemoveContainer" containerID="b495d6e2624befeaa77f3763146f089d43b4a72cc435952d093e4b9a42d90a56" Nov 24 01:30:07 crc kubenswrapper[4755]: E1124 01:30:07.756564 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b495d6e2624befeaa77f3763146f089d43b4a72cc435952d093e4b9a42d90a56\": container with ID starting with b495d6e2624befeaa77f3763146f089d43b4a72cc435952d093e4b9a42d90a56 not found: ID does not exist" containerID="b495d6e2624befeaa77f3763146f089d43b4a72cc435952d093e4b9a42d90a56" Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.756628 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b495d6e2624befeaa77f3763146f089d43b4a72cc435952d093e4b9a42d90a56"} err="failed to get container status \"b495d6e2624befeaa77f3763146f089d43b4a72cc435952d093e4b9a42d90a56\": rpc error: code = NotFound desc = could not find container \"b495d6e2624befeaa77f3763146f089d43b4a72cc435952d093e4b9a42d90a56\": container with ID starting with b495d6e2624befeaa77f3763146f089d43b4a72cc435952d093e4b9a42d90a56 not found: ID does not exist" Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.756658 4755 scope.go:117] "RemoveContainer" containerID="5a01d0bc538f5c8fed0e8cefc2b624a998bfba13b69b195b454568ed2ee77308" Nov 24 01:30:07 crc kubenswrapper[4755]: E1124 01:30:07.757087 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a01d0bc538f5c8fed0e8cefc2b624a998bfba13b69b195b454568ed2ee77308\": container with ID starting with 5a01d0bc538f5c8fed0e8cefc2b624a998bfba13b69b195b454568ed2ee77308 not found: ID does not exist" containerID="5a01d0bc538f5c8fed0e8cefc2b624a998bfba13b69b195b454568ed2ee77308" Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.757122 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a01d0bc538f5c8fed0e8cefc2b624a998bfba13b69b195b454568ed2ee77308"} err="failed to get container status \"5a01d0bc538f5c8fed0e8cefc2b624a998bfba13b69b195b454568ed2ee77308\": rpc error: code = NotFound desc = could not find container \"5a01d0bc538f5c8fed0e8cefc2b624a998bfba13b69b195b454568ed2ee77308\": container with ID starting with 5a01d0bc538f5c8fed0e8cefc2b624a998bfba13b69b195b454568ed2ee77308 not found: ID does not exist" Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.757148 4755 scope.go:117] "RemoveContainer" containerID="521bc80ec427df057b5e0e414d7cec75a2344f7f05cf577494f18b0979ca2f3e" Nov 24 01:30:07 crc kubenswrapper[4755]: E1124 01:30:07.757895 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"521bc80ec427df057b5e0e414d7cec75a2344f7f05cf577494f18b0979ca2f3e\": container with ID starting with 521bc80ec427df057b5e0e414d7cec75a2344f7f05cf577494f18b0979ca2f3e not found: ID does not exist" containerID="521bc80ec427df057b5e0e414d7cec75a2344f7f05cf577494f18b0979ca2f3e" Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.757920 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"521bc80ec427df057b5e0e414d7cec75a2344f7f05cf577494f18b0979ca2f3e"} err="failed to get container status \"521bc80ec427df057b5e0e414d7cec75a2344f7f05cf577494f18b0979ca2f3e\": rpc error: code = NotFound desc = could not find container 
\"521bc80ec427df057b5e0e414d7cec75a2344f7f05cf577494f18b0979ca2f3e\": container with ID starting with 521bc80ec427df057b5e0e414d7cec75a2344f7f05cf577494f18b0979ca2f3e not found: ID does not exist" Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.922045 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.950936 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.974632 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 24 01:30:07 crc kubenswrapper[4755]: E1124 01:30:07.975591 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a8a46b7-de82-4985-9b04-6287ed9d8f30" containerName="ceilometer-notification-agent" Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.975628 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a8a46b7-de82-4985-9b04-6287ed9d8f30" containerName="ceilometer-notification-agent" Nov 24 01:30:07 crc kubenswrapper[4755]: E1124 01:30:07.975645 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a8a46b7-de82-4985-9b04-6287ed9d8f30" containerName="ceilometer-central-agent" Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.975654 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a8a46b7-de82-4985-9b04-6287ed9d8f30" containerName="ceilometer-central-agent" Nov 24 01:30:07 crc kubenswrapper[4755]: E1124 01:30:07.975684 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a8a46b7-de82-4985-9b04-6287ed9d8f30" containerName="sg-core" Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.975726 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a8a46b7-de82-4985-9b04-6287ed9d8f30" containerName="sg-core" Nov 24 01:30:07 crc kubenswrapper[4755]: E1124 01:30:07.975738 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a8a46b7-de82-4985-9b04-6287ed9d8f30" containerName="proxy-httpd" Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.975745 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a8a46b7-de82-4985-9b04-6287ed9d8f30" containerName="proxy-httpd" Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.976068 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a8a46b7-de82-4985-9b04-6287ed9d8f30" containerName="sg-core" Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.976125 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a8a46b7-de82-4985-9b04-6287ed9d8f30" containerName="ceilometer-central-agent" Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.976147 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a8a46b7-de82-4985-9b04-6287ed9d8f30" containerName="ceilometer-notification-agent" Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.976160 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a8a46b7-de82-4985-9b04-6287ed9d8f30" containerName="proxy-httpd" Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.978301 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.980445 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.981883 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 24 01:30:07 crc kubenswrapper[4755]: I1124 01:30:07.984865 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 24 01:30:08 crc kubenswrapper[4755]: I1124 01:30:08.014856 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a8a46b7-de82-4985-9b04-6287ed9d8f30" path="/var/lib/kubelet/pods/7a8a46b7-de82-4985-9b04-6287ed9d8f30/volumes" Nov 24 01:30:08 crc kubenswrapper[4755]: I1124 01:30:08.047244 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e6f9da38-454d-445a-9cf7-4c24b425602e-log-httpd\") pod \"ceilometer-0\" (UID: \"e6f9da38-454d-445a-9cf7-4c24b425602e\") " pod="openstack/ceilometer-0" Nov 24 01:30:08 crc kubenswrapper[4755]: I1124 01:30:08.047291 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6f9da38-454d-445a-9cf7-4c24b425602e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e6f9da38-454d-445a-9cf7-4c24b425602e\") " pod="openstack/ceilometer-0" Nov 24 01:30:08 crc kubenswrapper[4755]: I1124 01:30:08.047347 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e6f9da38-454d-445a-9cf7-4c24b425602e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e6f9da38-454d-445a-9cf7-4c24b425602e\") " pod="openstack/ceilometer-0" Nov 24 01:30:08 crc kubenswrapper[4755]: I1124 01:30:08.047435 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pwpkw\" (UniqueName: \"kubernetes.io/projected/e6f9da38-454d-445a-9cf7-4c24b425602e-kube-api-access-pwpkw\") pod \"ceilometer-0\" (UID: \"e6f9da38-454d-445a-9cf7-4c24b425602e\") " pod="openstack/ceilometer-0" Nov 24 01:30:08 crc kubenswrapper[4755]: I1124 01:30:08.047456 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6f9da38-454d-445a-9cf7-4c24b425602e-config-data\") pod \"ceilometer-0\" (UID: \"e6f9da38-454d-445a-9cf7-4c24b425602e\") " pod="openstack/ceilometer-0" Nov 24 01:30:08 crc kubenswrapper[4755]: I1124 01:30:08.047505 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e6f9da38-454d-445a-9cf7-4c24b425602e-run-httpd\") pod \"ceilometer-0\" (UID: \"e6f9da38-454d-445a-9cf7-4c24b425602e\") " pod="openstack/ceilometer-0" Nov 24 01:30:08 crc kubenswrapper[4755]: I1124 01:30:08.047526 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e6f9da38-454d-445a-9cf7-4c24b425602e-scripts\") pod \"ceilometer-0\" (UID: \"e6f9da38-454d-445a-9cf7-4c24b425602e\") " pod="openstack/ceilometer-0" Nov 24 01:30:08 crc kubenswrapper[4755]: I1124 01:30:08.149374 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-pwpkw\" (UniqueName: \"kubernetes.io/projected/e6f9da38-454d-445a-9cf7-4c24b425602e-kube-api-access-pwpkw\") pod \"ceilometer-0\" (UID: \"e6f9da38-454d-445a-9cf7-4c24b425602e\") " pod="openstack/ceilometer-0" Nov 24 01:30:08 crc kubenswrapper[4755]: I1124 01:30:08.149426 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6f9da38-454d-445a-9cf7-4c24b425602e-config-data\") pod \"ceilometer-0\" (UID: \"e6f9da38-454d-445a-9cf7-4c24b425602e\") " pod="openstack/ceilometer-0" Nov 24 01:30:08 crc kubenswrapper[4755]: I1124 01:30:08.149460 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e6f9da38-454d-445a-9cf7-4c24b425602e-run-httpd\") pod \"ceilometer-0\" (UID: \"e6f9da38-454d-445a-9cf7-4c24b425602e\") " pod="openstack/ceilometer-0" Nov 24 01:30:08 crc kubenswrapper[4755]: I1124 01:30:08.149477 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e6f9da38-454d-445a-9cf7-4c24b425602e-scripts\") pod \"ceilometer-0\" (UID: \"e6f9da38-454d-445a-9cf7-4c24b425602e\") " pod="openstack/ceilometer-0" Nov 24 01:30:08 crc kubenswrapper[4755]: I1124 01:30:08.149539 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e6f9da38-454d-445a-9cf7-4c24b425602e-log-httpd\") pod \"ceilometer-0\" (UID: \"e6f9da38-454d-445a-9cf7-4c24b425602e\") " pod="openstack/ceilometer-0" Nov 24 01:30:08 crc kubenswrapper[4755]: I1124 01:30:08.149562 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6f9da38-454d-445a-9cf7-4c24b425602e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e6f9da38-454d-445a-9cf7-4c24b425602e\") " pod="openstack/ceilometer-0" Nov 24 01:30:08 crc kubenswrapper[4755]: I1124 01:30:08.149616 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e6f9da38-454d-445a-9cf7-4c24b425602e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e6f9da38-454d-445a-9cf7-4c24b425602e\") " pod="openstack/ceilometer-0" Nov 24 01:30:08 crc kubenswrapper[4755]: I1124 01:30:08.149929 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e6f9da38-454d-445a-9cf7-4c24b425602e-run-httpd\") pod \"ceilometer-0\" (UID: \"e6f9da38-454d-445a-9cf7-4c24b425602e\") " pod="openstack/ceilometer-0" Nov 24 01:30:08 crc kubenswrapper[4755]: I1124 01:30:08.150081 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e6f9da38-454d-445a-9cf7-4c24b425602e-log-httpd\") pod \"ceilometer-0\" (UID: \"e6f9da38-454d-445a-9cf7-4c24b425602e\") " pod="openstack/ceilometer-0" Nov 24 01:30:08 crc kubenswrapper[4755]: I1124 01:30:08.153501 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e6f9da38-454d-445a-9cf7-4c24b425602e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e6f9da38-454d-445a-9cf7-4c24b425602e\") " pod="openstack/ceilometer-0" Nov 24 01:30:08 crc kubenswrapper[4755]: I1124 01:30:08.154316 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/e6f9da38-454d-445a-9cf7-4c24b425602e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e6f9da38-454d-445a-9cf7-4c24b425602e\") " pod="openstack/ceilometer-0" Nov 24 01:30:08 crc kubenswrapper[4755]: I1124 01:30:08.155367 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6f9da38-454d-445a-9cf7-4c24b425602e-config-data\") pod \"ceilometer-0\" (UID: \"e6f9da38-454d-445a-9cf7-4c24b425602e\") " pod="openstack/ceilometer-0" Nov 24 01:30:08 crc kubenswrapper[4755]: I1124 01:30:08.157955 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e6f9da38-454d-445a-9cf7-4c24b425602e-scripts\") pod \"ceilometer-0\" (UID: \"e6f9da38-454d-445a-9cf7-4c24b425602e\") " pod="openstack/ceilometer-0" Nov 24 01:30:08 crc kubenswrapper[4755]: I1124 01:30:08.171362 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pwpkw\" (UniqueName: \"kubernetes.io/projected/e6f9da38-454d-445a-9cf7-4c24b425602e-kube-api-access-pwpkw\") pod \"ceilometer-0\" (UID: \"e6f9da38-454d-445a-9cf7-4c24b425602e\") " pod="openstack/ceilometer-0" Nov 24 01:30:08 crc kubenswrapper[4755]: I1124 01:30:08.303069 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 24 01:30:08 crc kubenswrapper[4755]: I1124 01:30:08.805423 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 24 01:30:08 crc kubenswrapper[4755]: W1124 01:30:08.816162 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode6f9da38_454d_445a_9cf7_4c24b425602e.slice/crio-00f02e304b9e577ee6a101d1809dfc1da9bd44a7b3bc9728c72688ef0ab4e3d7 WatchSource:0}: Error finding container 00f02e304b9e577ee6a101d1809dfc1da9bd44a7b3bc9728c72688ef0ab4e3d7: Status 404 returned error can't find the container with id 00f02e304b9e577ee6a101d1809dfc1da9bd44a7b3bc9728c72688ef0ab4e3d7 Nov 24 01:30:09 crc kubenswrapper[4755]: I1124 01:30:09.593011 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e6f9da38-454d-445a-9cf7-4c24b425602e","Type":"ContainerStarted","Data":"00f02e304b9e577ee6a101d1809dfc1da9bd44a7b3bc9728c72688ef0ab4e3d7"} Nov 24 01:30:09 crc kubenswrapper[4755]: I1124 01:30:09.673989 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 24 01:30:11 crc kubenswrapper[4755]: I1124 01:30:11.140470 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Nov 24 01:30:11 crc kubenswrapper[4755]: I1124 01:30:11.140536 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Nov 24 01:30:11 crc kubenswrapper[4755]: I1124 01:30:11.183285 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Nov 24 01:30:11 crc kubenswrapper[4755]: I1124 01:30:11.187238 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Nov 24 01:30:11 crc kubenswrapper[4755]: I1124 01:30:11.613094 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Nov 24 01:30:11 crc kubenswrapper[4755]: I1124 01:30:11.613405 4755 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openstack/glance-default-external-api-0" Nov 24 01:30:12 crc kubenswrapper[4755]: I1124 01:30:12.165776 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Nov 24 01:30:12 crc kubenswrapper[4755]: I1124 01:30:12.165829 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Nov 24 01:30:12 crc kubenswrapper[4755]: I1124 01:30:12.205192 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Nov 24 01:30:12 crc kubenswrapper[4755]: I1124 01:30:12.212169 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Nov 24 01:30:12 crc kubenswrapper[4755]: I1124 01:30:12.621854 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Nov 24 01:30:12 crc kubenswrapper[4755]: I1124 01:30:12.622141 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Nov 24 01:30:13 crc kubenswrapper[4755]: I1124 01:30:13.445138 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Nov 24 01:30:13 crc kubenswrapper[4755]: I1124 01:30:13.636459 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Nov 24 01:30:14 crc kubenswrapper[4755]: I1124 01:30:14.648696 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e6f9da38-454d-445a-9cf7-4c24b425602e","Type":"ContainerStarted","Data":"7009ef781687154ed24fc6c270759033b86caad48723701ddd7cad985f6625c5"} Nov 24 01:30:14 crc kubenswrapper[4755]: I1124 01:30:14.649244 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e6f9da38-454d-445a-9cf7-4c24b425602e","Type":"ContainerStarted","Data":"a92f1d26cfff10b0cb2ad7a6573b2c5e09cfc8d425ac085227f4051bc15f32e3"} Nov 24 01:30:14 crc kubenswrapper[4755]: I1124 01:30:14.650739 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-hqx5z" event={"ID":"8965375e-ebca-4829-8445-54dabd02845f","Type":"ContainerStarted","Data":"c317437f3833ffa9c4bba705ee2105f6533d6147f55bd6b859fe9bdc40389d93"} Nov 24 01:30:14 crc kubenswrapper[4755]: I1124 01:30:14.673552 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-hqx5z" podStartSLOduration=2.121255296 podStartE2EDuration="9.673531343s" podCreationTimestamp="2025-11-24 01:30:05 +0000 UTC" firstStartedPulling="2025-11-24 01:30:06.126955682 +0000 UTC m=+1030.813021183" lastFinishedPulling="2025-11-24 01:30:13.679231729 +0000 UTC m=+1038.365297230" observedRunningTime="2025-11-24 01:30:14.669759149 +0000 UTC m=+1039.355824650" watchObservedRunningTime="2025-11-24 01:30:14.673531343 +0000 UTC m=+1039.359596844" Nov 24 01:30:15 crc kubenswrapper[4755]: I1124 01:30:15.297239 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Nov 24 01:30:15 crc kubenswrapper[4755]: I1124 01:30:15.297658 4755 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 24 01:30:15 crc kubenswrapper[4755]: I1124 01:30:15.300854 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack/glance-default-internal-api-0" Nov 24 01:30:15 crc kubenswrapper[4755]: I1124 01:30:15.660828 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e6f9da38-454d-445a-9cf7-4c24b425602e","Type":"ContainerStarted","Data":"7a6ef394f7aa0d91698b5d5610268191bd571fcb3803effa7df15bf87b4fdf0c"} Nov 24 01:30:17 crc kubenswrapper[4755]: I1124 01:30:17.684920 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e6f9da38-454d-445a-9cf7-4c24b425602e","Type":"ContainerStarted","Data":"77939ae4247afdab8d0ff8a63f902edd0505c40845933732ae526da4d985dd49"} Nov 24 01:30:17 crc kubenswrapper[4755]: I1124 01:30:17.685455 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 24 01:30:17 crc kubenswrapper[4755]: I1124 01:30:17.685285 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e6f9da38-454d-445a-9cf7-4c24b425602e" containerName="proxy-httpd" containerID="cri-o://77939ae4247afdab8d0ff8a63f902edd0505c40845933732ae526da4d985dd49" gracePeriod=30 Nov 24 01:30:17 crc kubenswrapper[4755]: I1124 01:30:17.685283 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e6f9da38-454d-445a-9cf7-4c24b425602e" containerName="sg-core" containerID="cri-o://7a6ef394f7aa0d91698b5d5610268191bd571fcb3803effa7df15bf87b4fdf0c" gracePeriod=30 Nov 24 01:30:17 crc kubenswrapper[4755]: I1124 01:30:17.685317 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e6f9da38-454d-445a-9cf7-4c24b425602e" containerName="ceilometer-notification-agent" containerID="cri-o://7009ef781687154ed24fc6c270759033b86caad48723701ddd7cad985f6625c5" gracePeriod=30 Nov 24 01:30:17 crc kubenswrapper[4755]: I1124 01:30:17.685221 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e6f9da38-454d-445a-9cf7-4c24b425602e" containerName="ceilometer-central-agent" containerID="cri-o://a92f1d26cfff10b0cb2ad7a6573b2c5e09cfc8d425ac085227f4051bc15f32e3" gracePeriod=30 Nov 24 01:30:17 crc kubenswrapper[4755]: I1124 01:30:17.728435 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.444111976 podStartE2EDuration="10.728404771s" podCreationTimestamp="2025-11-24 01:30:07 +0000 UTC" firstStartedPulling="2025-11-24 01:30:08.819726668 +0000 UTC m=+1033.505792169" lastFinishedPulling="2025-11-24 01:30:17.104019463 +0000 UTC m=+1041.790084964" observedRunningTime="2025-11-24 01:30:17.715257598 +0000 UTC m=+1042.401323149" watchObservedRunningTime="2025-11-24 01:30:17.728404771 +0000 UTC m=+1042.414470292" Nov 24 01:30:18 crc kubenswrapper[4755]: I1124 01:30:18.697166 4755 generic.go:334] "Generic (PLEG): container finished" podID="e6f9da38-454d-445a-9cf7-4c24b425602e" containerID="77939ae4247afdab8d0ff8a63f902edd0505c40845933732ae526da4d985dd49" exitCode=0 Nov 24 01:30:18 crc kubenswrapper[4755]: I1124 01:30:18.697201 4755 generic.go:334] "Generic (PLEG): container finished" podID="e6f9da38-454d-445a-9cf7-4c24b425602e" containerID="7a6ef394f7aa0d91698b5d5610268191bd571fcb3803effa7df15bf87b4fdf0c" exitCode=2 Nov 24 01:30:18 crc kubenswrapper[4755]: I1124 01:30:18.697209 4755 generic.go:334] "Generic (PLEG): container finished" podID="e6f9da38-454d-445a-9cf7-4c24b425602e" 
containerID="7009ef781687154ed24fc6c270759033b86caad48723701ddd7cad985f6625c5" exitCode=0 Nov 24 01:30:18 crc kubenswrapper[4755]: I1124 01:30:18.697228 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e6f9da38-454d-445a-9cf7-4c24b425602e","Type":"ContainerDied","Data":"77939ae4247afdab8d0ff8a63f902edd0505c40845933732ae526da4d985dd49"} Nov 24 01:30:18 crc kubenswrapper[4755]: I1124 01:30:18.697258 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e6f9da38-454d-445a-9cf7-4c24b425602e","Type":"ContainerDied","Data":"7a6ef394f7aa0d91698b5d5610268191bd571fcb3803effa7df15bf87b4fdf0c"} Nov 24 01:30:18 crc kubenswrapper[4755]: I1124 01:30:18.697267 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e6f9da38-454d-445a-9cf7-4c24b425602e","Type":"ContainerDied","Data":"7009ef781687154ed24fc6c270759033b86caad48723701ddd7cad985f6625c5"} Nov 24 01:30:22 crc kubenswrapper[4755]: E1124 01:30:22.077953 4755 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode6f9da38_454d_445a_9cf7_4c24b425602e.slice/crio-conmon-a92f1d26cfff10b0cb2ad7a6573b2c5e09cfc8d425ac085227f4051bc15f32e3.scope\": RecentStats: unable to find data in memory cache]" Nov 24 01:30:22 crc kubenswrapper[4755]: I1124 01:30:22.750527 4755 generic.go:334] "Generic (PLEG): container finished" podID="e6f9da38-454d-445a-9cf7-4c24b425602e" containerID="a92f1d26cfff10b0cb2ad7a6573b2c5e09cfc8d425ac085227f4051bc15f32e3" exitCode=0 Nov 24 01:30:22 crc kubenswrapper[4755]: I1124 01:30:22.750584 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e6f9da38-454d-445a-9cf7-4c24b425602e","Type":"ContainerDied","Data":"a92f1d26cfff10b0cb2ad7a6573b2c5e09cfc8d425ac085227f4051bc15f32e3"} Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.217961 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.352664 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6f9da38-454d-445a-9cf7-4c24b425602e-config-data\") pod \"e6f9da38-454d-445a-9cf7-4c24b425602e\" (UID: \"e6f9da38-454d-445a-9cf7-4c24b425602e\") " Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.352810 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e6f9da38-454d-445a-9cf7-4c24b425602e-scripts\") pod \"e6f9da38-454d-445a-9cf7-4c24b425602e\" (UID: \"e6f9da38-454d-445a-9cf7-4c24b425602e\") " Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.352849 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6f9da38-454d-445a-9cf7-4c24b425602e-combined-ca-bundle\") pod \"e6f9da38-454d-445a-9cf7-4c24b425602e\" (UID: \"e6f9da38-454d-445a-9cf7-4c24b425602e\") " Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.352885 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pwpkw\" (UniqueName: \"kubernetes.io/projected/e6f9da38-454d-445a-9cf7-4c24b425602e-kube-api-access-pwpkw\") pod \"e6f9da38-454d-445a-9cf7-4c24b425602e\" (UID: \"e6f9da38-454d-445a-9cf7-4c24b425602e\") " Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.352911 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e6f9da38-454d-445a-9cf7-4c24b425602e-sg-core-conf-yaml\") pod \"e6f9da38-454d-445a-9cf7-4c24b425602e\" (UID: \"e6f9da38-454d-445a-9cf7-4c24b425602e\") " Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.352936 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e6f9da38-454d-445a-9cf7-4c24b425602e-log-httpd\") pod \"e6f9da38-454d-445a-9cf7-4c24b425602e\" (UID: \"e6f9da38-454d-445a-9cf7-4c24b425602e\") " Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.352955 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e6f9da38-454d-445a-9cf7-4c24b425602e-run-httpd\") pod \"e6f9da38-454d-445a-9cf7-4c24b425602e\" (UID: \"e6f9da38-454d-445a-9cf7-4c24b425602e\") " Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.353721 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e6f9da38-454d-445a-9cf7-4c24b425602e-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "e6f9da38-454d-445a-9cf7-4c24b425602e" (UID: "e6f9da38-454d-445a-9cf7-4c24b425602e"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.354828 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e6f9da38-454d-445a-9cf7-4c24b425602e-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "e6f9da38-454d-445a-9cf7-4c24b425602e" (UID: "e6f9da38-454d-445a-9cf7-4c24b425602e"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.359838 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6f9da38-454d-445a-9cf7-4c24b425602e-kube-api-access-pwpkw" (OuterVolumeSpecName: "kube-api-access-pwpkw") pod "e6f9da38-454d-445a-9cf7-4c24b425602e" (UID: "e6f9da38-454d-445a-9cf7-4c24b425602e"). InnerVolumeSpecName "kube-api-access-pwpkw". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.360317 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6f9da38-454d-445a-9cf7-4c24b425602e-scripts" (OuterVolumeSpecName: "scripts") pod "e6f9da38-454d-445a-9cf7-4c24b425602e" (UID: "e6f9da38-454d-445a-9cf7-4c24b425602e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.383116 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6f9da38-454d-445a-9cf7-4c24b425602e-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "e6f9da38-454d-445a-9cf7-4c24b425602e" (UID: "e6f9da38-454d-445a-9cf7-4c24b425602e"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.429786 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6f9da38-454d-445a-9cf7-4c24b425602e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e6f9da38-454d-445a-9cf7-4c24b425602e" (UID: "e6f9da38-454d-445a-9cf7-4c24b425602e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.455199 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e6f9da38-454d-445a-9cf7-4c24b425602e-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.455240 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6f9da38-454d-445a-9cf7-4c24b425602e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.455254 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pwpkw\" (UniqueName: \"kubernetes.io/projected/e6f9da38-454d-445a-9cf7-4c24b425602e-kube-api-access-pwpkw\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.455267 4755 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e6f9da38-454d-445a-9cf7-4c24b425602e-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.455280 4755 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e6f9da38-454d-445a-9cf7-4c24b425602e-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.455291 4755 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e6f9da38-454d-445a-9cf7-4c24b425602e-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.470260 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for 
volume "kubernetes.io/secret/e6f9da38-454d-445a-9cf7-4c24b425602e-config-data" (OuterVolumeSpecName: "config-data") pod "e6f9da38-454d-445a-9cf7-4c24b425602e" (UID: "e6f9da38-454d-445a-9cf7-4c24b425602e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.556561 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6f9da38-454d-445a-9cf7-4c24b425602e-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.761115 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e6f9da38-454d-445a-9cf7-4c24b425602e","Type":"ContainerDied","Data":"00f02e304b9e577ee6a101d1809dfc1da9bd44a7b3bc9728c72688ef0ab4e3d7"} Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.761168 4755 scope.go:117] "RemoveContainer" containerID="77939ae4247afdab8d0ff8a63f902edd0505c40845933732ae526da4d985dd49" Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.761188 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.786252 4755 scope.go:117] "RemoveContainer" containerID="7a6ef394f7aa0d91698b5d5610268191bd571fcb3803effa7df15bf87b4fdf0c" Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.792704 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.805146 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.825660 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 24 01:30:23 crc kubenswrapper[4755]: E1124 01:30:23.826124 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6f9da38-454d-445a-9cf7-4c24b425602e" containerName="ceilometer-notification-agent" Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.826147 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6f9da38-454d-445a-9cf7-4c24b425602e" containerName="ceilometer-notification-agent" Nov 24 01:30:23 crc kubenswrapper[4755]: E1124 01:30:23.826174 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6f9da38-454d-445a-9cf7-4c24b425602e" containerName="proxy-httpd" Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.826185 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6f9da38-454d-445a-9cf7-4c24b425602e" containerName="proxy-httpd" Nov 24 01:30:23 crc kubenswrapper[4755]: E1124 01:30:23.826227 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6f9da38-454d-445a-9cf7-4c24b425602e" containerName="sg-core" Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.826236 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6f9da38-454d-445a-9cf7-4c24b425602e" containerName="sg-core" Nov 24 01:30:23 crc kubenswrapper[4755]: E1124 01:30:23.826254 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6f9da38-454d-445a-9cf7-4c24b425602e" containerName="ceilometer-central-agent" Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.826264 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6f9da38-454d-445a-9cf7-4c24b425602e" containerName="ceilometer-central-agent" Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.826500 4755 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="e6f9da38-454d-445a-9cf7-4c24b425602e" containerName="sg-core" Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.826528 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6f9da38-454d-445a-9cf7-4c24b425602e" containerName="proxy-httpd" Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.826541 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6f9da38-454d-445a-9cf7-4c24b425602e" containerName="ceilometer-notification-agent" Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.826555 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6f9da38-454d-445a-9cf7-4c24b425602e" containerName="ceilometer-central-agent" Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.827641 4755 scope.go:117] "RemoveContainer" containerID="7009ef781687154ed24fc6c270759033b86caad48723701ddd7cad985f6625c5" Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.828852 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.830639 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.834087 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.843966 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.861317 4755 scope.go:117] "RemoveContainer" containerID="a92f1d26cfff10b0cb2ad7a6573b2c5e09cfc8d425ac085227f4051bc15f32e3" Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.963527 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7edf21cd-ec8b-4e53-a497-7f43ee38a3b5-scripts\") pod \"ceilometer-0\" (UID: \"7edf21cd-ec8b-4e53-a497-7f43ee38a3b5\") " pod="openstack/ceilometer-0" Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.963636 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w6mzr\" (UniqueName: \"kubernetes.io/projected/7edf21cd-ec8b-4e53-a497-7f43ee38a3b5-kube-api-access-w6mzr\") pod \"ceilometer-0\" (UID: \"7edf21cd-ec8b-4e53-a497-7f43ee38a3b5\") " pod="openstack/ceilometer-0" Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.963682 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7edf21cd-ec8b-4e53-a497-7f43ee38a3b5-run-httpd\") pod \"ceilometer-0\" (UID: \"7edf21cd-ec8b-4e53-a497-7f43ee38a3b5\") " pod="openstack/ceilometer-0" Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.963737 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7edf21cd-ec8b-4e53-a497-7f43ee38a3b5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7edf21cd-ec8b-4e53-a497-7f43ee38a3b5\") " pod="openstack/ceilometer-0" Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.963798 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7edf21cd-ec8b-4e53-a497-7f43ee38a3b5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7edf21cd-ec8b-4e53-a497-7f43ee38a3b5\") " 
pod="openstack/ceilometer-0" Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.963843 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7edf21cd-ec8b-4e53-a497-7f43ee38a3b5-log-httpd\") pod \"ceilometer-0\" (UID: \"7edf21cd-ec8b-4e53-a497-7f43ee38a3b5\") " pod="openstack/ceilometer-0" Nov 24 01:30:23 crc kubenswrapper[4755]: I1124 01:30:23.963949 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7edf21cd-ec8b-4e53-a497-7f43ee38a3b5-config-data\") pod \"ceilometer-0\" (UID: \"7edf21cd-ec8b-4e53-a497-7f43ee38a3b5\") " pod="openstack/ceilometer-0" Nov 24 01:30:24 crc kubenswrapper[4755]: I1124 01:30:24.014746 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e6f9da38-454d-445a-9cf7-4c24b425602e" path="/var/lib/kubelet/pods/e6f9da38-454d-445a-9cf7-4c24b425602e/volumes" Nov 24 01:30:24 crc kubenswrapper[4755]: I1124 01:30:24.065287 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7edf21cd-ec8b-4e53-a497-7f43ee38a3b5-scripts\") pod \"ceilometer-0\" (UID: \"7edf21cd-ec8b-4e53-a497-7f43ee38a3b5\") " pod="openstack/ceilometer-0" Nov 24 01:30:24 crc kubenswrapper[4755]: I1124 01:30:24.065334 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w6mzr\" (UniqueName: \"kubernetes.io/projected/7edf21cd-ec8b-4e53-a497-7f43ee38a3b5-kube-api-access-w6mzr\") pod \"ceilometer-0\" (UID: \"7edf21cd-ec8b-4e53-a497-7f43ee38a3b5\") " pod="openstack/ceilometer-0" Nov 24 01:30:24 crc kubenswrapper[4755]: I1124 01:30:24.065363 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7edf21cd-ec8b-4e53-a497-7f43ee38a3b5-run-httpd\") pod \"ceilometer-0\" (UID: \"7edf21cd-ec8b-4e53-a497-7f43ee38a3b5\") " pod="openstack/ceilometer-0" Nov 24 01:30:24 crc kubenswrapper[4755]: I1124 01:30:24.065400 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7edf21cd-ec8b-4e53-a497-7f43ee38a3b5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7edf21cd-ec8b-4e53-a497-7f43ee38a3b5\") " pod="openstack/ceilometer-0" Nov 24 01:30:24 crc kubenswrapper[4755]: I1124 01:30:24.065436 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7edf21cd-ec8b-4e53-a497-7f43ee38a3b5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7edf21cd-ec8b-4e53-a497-7f43ee38a3b5\") " pod="openstack/ceilometer-0" Nov 24 01:30:24 crc kubenswrapper[4755]: I1124 01:30:24.065457 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7edf21cd-ec8b-4e53-a497-7f43ee38a3b5-log-httpd\") pod \"ceilometer-0\" (UID: \"7edf21cd-ec8b-4e53-a497-7f43ee38a3b5\") " pod="openstack/ceilometer-0" Nov 24 01:30:24 crc kubenswrapper[4755]: I1124 01:30:24.065511 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7edf21cd-ec8b-4e53-a497-7f43ee38a3b5-config-data\") pod \"ceilometer-0\" (UID: \"7edf21cd-ec8b-4e53-a497-7f43ee38a3b5\") " pod="openstack/ceilometer-0" Nov 24 01:30:24 crc kubenswrapper[4755]: I1124 01:30:24.066147 4755 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7edf21cd-ec8b-4e53-a497-7f43ee38a3b5-run-httpd\") pod \"ceilometer-0\" (UID: \"7edf21cd-ec8b-4e53-a497-7f43ee38a3b5\") " pod="openstack/ceilometer-0" Nov 24 01:30:24 crc kubenswrapper[4755]: I1124 01:30:24.066450 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7edf21cd-ec8b-4e53-a497-7f43ee38a3b5-log-httpd\") pod \"ceilometer-0\" (UID: \"7edf21cd-ec8b-4e53-a497-7f43ee38a3b5\") " pod="openstack/ceilometer-0" Nov 24 01:30:24 crc kubenswrapper[4755]: I1124 01:30:24.070839 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7edf21cd-ec8b-4e53-a497-7f43ee38a3b5-config-data\") pod \"ceilometer-0\" (UID: \"7edf21cd-ec8b-4e53-a497-7f43ee38a3b5\") " pod="openstack/ceilometer-0" Nov 24 01:30:24 crc kubenswrapper[4755]: I1124 01:30:24.071381 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7edf21cd-ec8b-4e53-a497-7f43ee38a3b5-scripts\") pod \"ceilometer-0\" (UID: \"7edf21cd-ec8b-4e53-a497-7f43ee38a3b5\") " pod="openstack/ceilometer-0" Nov 24 01:30:24 crc kubenswrapper[4755]: I1124 01:30:24.072141 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7edf21cd-ec8b-4e53-a497-7f43ee38a3b5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7edf21cd-ec8b-4e53-a497-7f43ee38a3b5\") " pod="openstack/ceilometer-0" Nov 24 01:30:24 crc kubenswrapper[4755]: I1124 01:30:24.073770 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7edf21cd-ec8b-4e53-a497-7f43ee38a3b5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7edf21cd-ec8b-4e53-a497-7f43ee38a3b5\") " pod="openstack/ceilometer-0" Nov 24 01:30:24 crc kubenswrapper[4755]: I1124 01:30:24.087168 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w6mzr\" (UniqueName: \"kubernetes.io/projected/7edf21cd-ec8b-4e53-a497-7f43ee38a3b5-kube-api-access-w6mzr\") pod \"ceilometer-0\" (UID: \"7edf21cd-ec8b-4e53-a497-7f43ee38a3b5\") " pod="openstack/ceilometer-0" Nov 24 01:30:24 crc kubenswrapper[4755]: I1124 01:30:24.158302 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 24 01:30:24 crc kubenswrapper[4755]: I1124 01:30:24.612286 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 24 01:30:24 crc kubenswrapper[4755]: I1124 01:30:24.776202 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7edf21cd-ec8b-4e53-a497-7f43ee38a3b5","Type":"ContainerStarted","Data":"b3f39d1c39d96853eb2684fb0dfe516e6e4820d9ac769d21f5306759df436ef9"} Nov 24 01:30:25 crc kubenswrapper[4755]: I1124 01:30:25.138943 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 24 01:30:25 crc kubenswrapper[4755]: I1124 01:30:25.790254 4755 generic.go:334] "Generic (PLEG): container finished" podID="8965375e-ebca-4829-8445-54dabd02845f" containerID="c317437f3833ffa9c4bba705ee2105f6533d6147f55bd6b859fe9bdc40389d93" exitCode=0 Nov 24 01:30:25 crc kubenswrapper[4755]: I1124 01:30:25.790845 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-hqx5z" event={"ID":"8965375e-ebca-4829-8445-54dabd02845f","Type":"ContainerDied","Data":"c317437f3833ffa9c4bba705ee2105f6533d6147f55bd6b859fe9bdc40389d93"} Nov 24 01:30:25 crc kubenswrapper[4755]: I1124 01:30:25.797264 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7edf21cd-ec8b-4e53-a497-7f43ee38a3b5","Type":"ContainerStarted","Data":"a66ec590cf789c61a058125b88b0989163d216a084ea4d1ca659f92e51c21a52"} Nov 24 01:30:26 crc kubenswrapper[4755]: I1124 01:30:26.809985 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7edf21cd-ec8b-4e53-a497-7f43ee38a3b5","Type":"ContainerStarted","Data":"39f15efc5f594d37218e7b0f4ee3feab016294c89479d36c4e9a59c8c87abe32"} Nov 24 01:30:26 crc kubenswrapper[4755]: I1124 01:30:26.810388 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7edf21cd-ec8b-4e53-a497-7f43ee38a3b5","Type":"ContainerStarted","Data":"ae1121152a0a17430a6b177a97e656b88ec364d5e4c060997adec2351e26e25f"} Nov 24 01:30:27 crc kubenswrapper[4755]: I1124 01:30:27.156186 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-hqx5z" Nov 24 01:30:27 crc kubenswrapper[4755]: I1124 01:30:27.350492 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8965375e-ebca-4829-8445-54dabd02845f-scripts\") pod \"8965375e-ebca-4829-8445-54dabd02845f\" (UID: \"8965375e-ebca-4829-8445-54dabd02845f\") " Nov 24 01:30:27 crc kubenswrapper[4755]: I1124 01:30:27.351058 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8965375e-ebca-4829-8445-54dabd02845f-config-data\") pod \"8965375e-ebca-4829-8445-54dabd02845f\" (UID: \"8965375e-ebca-4829-8445-54dabd02845f\") " Nov 24 01:30:27 crc kubenswrapper[4755]: I1124 01:30:27.351312 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2nrk4\" (UniqueName: \"kubernetes.io/projected/8965375e-ebca-4829-8445-54dabd02845f-kube-api-access-2nrk4\") pod \"8965375e-ebca-4829-8445-54dabd02845f\" (UID: \"8965375e-ebca-4829-8445-54dabd02845f\") " Nov 24 01:30:27 crc kubenswrapper[4755]: I1124 01:30:27.351511 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8965375e-ebca-4829-8445-54dabd02845f-combined-ca-bundle\") pod \"8965375e-ebca-4829-8445-54dabd02845f\" (UID: \"8965375e-ebca-4829-8445-54dabd02845f\") " Nov 24 01:30:27 crc kubenswrapper[4755]: I1124 01:30:27.355934 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8965375e-ebca-4829-8445-54dabd02845f-scripts" (OuterVolumeSpecName: "scripts") pod "8965375e-ebca-4829-8445-54dabd02845f" (UID: "8965375e-ebca-4829-8445-54dabd02845f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:30:27 crc kubenswrapper[4755]: I1124 01:30:27.360350 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8965375e-ebca-4829-8445-54dabd02845f-kube-api-access-2nrk4" (OuterVolumeSpecName: "kube-api-access-2nrk4") pod "8965375e-ebca-4829-8445-54dabd02845f" (UID: "8965375e-ebca-4829-8445-54dabd02845f"). InnerVolumeSpecName "kube-api-access-2nrk4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:30:27 crc kubenswrapper[4755]: I1124 01:30:27.375302 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8965375e-ebca-4829-8445-54dabd02845f-config-data" (OuterVolumeSpecName: "config-data") pod "8965375e-ebca-4829-8445-54dabd02845f" (UID: "8965375e-ebca-4829-8445-54dabd02845f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:30:27 crc kubenswrapper[4755]: I1124 01:30:27.400706 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8965375e-ebca-4829-8445-54dabd02845f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8965375e-ebca-4829-8445-54dabd02845f" (UID: "8965375e-ebca-4829-8445-54dabd02845f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:30:27 crc kubenswrapper[4755]: I1124 01:30:27.454355 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8965375e-ebca-4829-8445-54dabd02845f-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:27 crc kubenswrapper[4755]: I1124 01:30:27.454412 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8965375e-ebca-4829-8445-54dabd02845f-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:27 crc kubenswrapper[4755]: I1124 01:30:27.454433 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2nrk4\" (UniqueName: \"kubernetes.io/projected/8965375e-ebca-4829-8445-54dabd02845f-kube-api-access-2nrk4\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:27 crc kubenswrapper[4755]: I1124 01:30:27.454453 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8965375e-ebca-4829-8445-54dabd02845f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:27 crc kubenswrapper[4755]: I1124 01:30:27.822449 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-hqx5z" event={"ID":"8965375e-ebca-4829-8445-54dabd02845f","Type":"ContainerDied","Data":"31c22c7941a4c75e9bd953e5fd209c1c196d7f5973f4eeabe42dffe31d796923"} Nov 24 01:30:27 crc kubenswrapper[4755]: I1124 01:30:27.823575 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="31c22c7941a4c75e9bd953e5fd209c1c196d7f5973f4eeabe42dffe31d796923" Nov 24 01:30:27 crc kubenswrapper[4755]: I1124 01:30:27.823377 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-hqx5z" Nov 24 01:30:27 crc kubenswrapper[4755]: I1124 01:30:27.920368 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 24 01:30:27 crc kubenswrapper[4755]: E1124 01:30:27.920875 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8965375e-ebca-4829-8445-54dabd02845f" containerName="nova-cell0-conductor-db-sync" Nov 24 01:30:27 crc kubenswrapper[4755]: I1124 01:30:27.920896 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="8965375e-ebca-4829-8445-54dabd02845f" containerName="nova-cell0-conductor-db-sync" Nov 24 01:30:27 crc kubenswrapper[4755]: I1124 01:30:27.921089 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="8965375e-ebca-4829-8445-54dabd02845f" containerName="nova-cell0-conductor-db-sync" Nov 24 01:30:27 crc kubenswrapper[4755]: I1124 01:30:27.921756 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 24 01:30:27 crc kubenswrapper[4755]: I1124 01:30:27.923929 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Nov 24 01:30:27 crc kubenswrapper[4755]: I1124 01:30:27.923960 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-t8zjp" Nov 24 01:30:27 crc kubenswrapper[4755]: I1124 01:30:27.942552 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 24 01:30:28 crc kubenswrapper[4755]: I1124 01:30:28.061745 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3b1705c-2c0d-4bd6-b928-87a6a105cb4d-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"d3b1705c-2c0d-4bd6-b928-87a6a105cb4d\") " pod="openstack/nova-cell0-conductor-0" Nov 24 01:30:28 crc kubenswrapper[4755]: I1124 01:30:28.062129 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3b1705c-2c0d-4bd6-b928-87a6a105cb4d-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"d3b1705c-2c0d-4bd6-b928-87a6a105cb4d\") " pod="openstack/nova-cell0-conductor-0" Nov 24 01:30:28 crc kubenswrapper[4755]: I1124 01:30:28.062215 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2jnwd\" (UniqueName: \"kubernetes.io/projected/d3b1705c-2c0d-4bd6-b928-87a6a105cb4d-kube-api-access-2jnwd\") pod \"nova-cell0-conductor-0\" (UID: \"d3b1705c-2c0d-4bd6-b928-87a6a105cb4d\") " pod="openstack/nova-cell0-conductor-0" Nov 24 01:30:28 crc kubenswrapper[4755]: I1124 01:30:28.163298 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3b1705c-2c0d-4bd6-b928-87a6a105cb4d-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"d3b1705c-2c0d-4bd6-b928-87a6a105cb4d\") " pod="openstack/nova-cell0-conductor-0" Nov 24 01:30:28 crc kubenswrapper[4755]: I1124 01:30:28.163353 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3b1705c-2c0d-4bd6-b928-87a6a105cb4d-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"d3b1705c-2c0d-4bd6-b928-87a6a105cb4d\") " pod="openstack/nova-cell0-conductor-0" Nov 24 01:30:28 crc kubenswrapper[4755]: I1124 01:30:28.163439 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2jnwd\" (UniqueName: \"kubernetes.io/projected/d3b1705c-2c0d-4bd6-b928-87a6a105cb4d-kube-api-access-2jnwd\") pod \"nova-cell0-conductor-0\" (UID: \"d3b1705c-2c0d-4bd6-b928-87a6a105cb4d\") " pod="openstack/nova-cell0-conductor-0" Nov 24 01:30:28 crc kubenswrapper[4755]: I1124 01:30:28.178003 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3b1705c-2c0d-4bd6-b928-87a6a105cb4d-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"d3b1705c-2c0d-4bd6-b928-87a6a105cb4d\") " pod="openstack/nova-cell0-conductor-0" Nov 24 01:30:28 crc kubenswrapper[4755]: I1124 01:30:28.178153 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3b1705c-2c0d-4bd6-b928-87a6a105cb4d-config-data\") pod \"nova-cell0-conductor-0\" 
(UID: \"d3b1705c-2c0d-4bd6-b928-87a6a105cb4d\") " pod="openstack/nova-cell0-conductor-0" Nov 24 01:30:28 crc kubenswrapper[4755]: I1124 01:30:28.183070 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2jnwd\" (UniqueName: \"kubernetes.io/projected/d3b1705c-2c0d-4bd6-b928-87a6a105cb4d-kube-api-access-2jnwd\") pod \"nova-cell0-conductor-0\" (UID: \"d3b1705c-2c0d-4bd6-b928-87a6a105cb4d\") " pod="openstack/nova-cell0-conductor-0" Nov 24 01:30:28 crc kubenswrapper[4755]: I1124 01:30:28.249246 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 24 01:30:28 crc kubenswrapper[4755]: I1124 01:30:28.708853 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 24 01:30:28 crc kubenswrapper[4755]: W1124 01:30:28.721774 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd3b1705c_2c0d_4bd6_b928_87a6a105cb4d.slice/crio-cd522436447d89cf93d30076ab08a623fc8c479d34e22934ed0b749b3154001d WatchSource:0}: Error finding container cd522436447d89cf93d30076ab08a623fc8c479d34e22934ed0b749b3154001d: Status 404 returned error can't find the container with id cd522436447d89cf93d30076ab08a623fc8c479d34e22934ed0b749b3154001d Nov 24 01:30:28 crc kubenswrapper[4755]: I1124 01:30:28.835587 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"d3b1705c-2c0d-4bd6-b928-87a6a105cb4d","Type":"ContainerStarted","Data":"cd522436447d89cf93d30076ab08a623fc8c479d34e22934ed0b749b3154001d"} Nov 24 01:30:28 crc kubenswrapper[4755]: I1124 01:30:28.838974 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7edf21cd-ec8b-4e53-a497-7f43ee38a3b5","Type":"ContainerStarted","Data":"9aa9702f004acad4acc81e76911e9d208c532711de72f9610cecb4db02d638f1"} Nov 24 01:30:28 crc kubenswrapper[4755]: I1124 01:30:28.839205 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7edf21cd-ec8b-4e53-a497-7f43ee38a3b5" containerName="ceilometer-central-agent" containerID="cri-o://a66ec590cf789c61a058125b88b0989163d216a084ea4d1ca659f92e51c21a52" gracePeriod=30 Nov 24 01:30:28 crc kubenswrapper[4755]: I1124 01:30:28.839263 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7edf21cd-ec8b-4e53-a497-7f43ee38a3b5" containerName="proxy-httpd" containerID="cri-o://9aa9702f004acad4acc81e76911e9d208c532711de72f9610cecb4db02d638f1" gracePeriod=30 Nov 24 01:30:28 crc kubenswrapper[4755]: I1124 01:30:28.839325 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 24 01:30:28 crc kubenswrapper[4755]: I1124 01:30:28.839328 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7edf21cd-ec8b-4e53-a497-7f43ee38a3b5" containerName="ceilometer-notification-agent" containerID="cri-o://39f15efc5f594d37218e7b0f4ee3feab016294c89479d36c4e9a59c8c87abe32" gracePeriod=30 Nov 24 01:30:28 crc kubenswrapper[4755]: I1124 01:30:28.839343 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7edf21cd-ec8b-4e53-a497-7f43ee38a3b5" containerName="sg-core" containerID="cri-o://ae1121152a0a17430a6b177a97e656b88ec364d5e4c060997adec2351e26e25f" gracePeriod=30 Nov 24 01:30:28 crc 
kubenswrapper[4755]: I1124 01:30:28.869717 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.8447583119999997 podStartE2EDuration="5.869695744s" podCreationTimestamp="2025-11-24 01:30:23 +0000 UTC" firstStartedPulling="2025-11-24 01:30:24.631800854 +0000 UTC m=+1049.317866375" lastFinishedPulling="2025-11-24 01:30:27.656738306 +0000 UTC m=+1052.342803807" observedRunningTime="2025-11-24 01:30:28.864244833 +0000 UTC m=+1053.550310334" watchObservedRunningTime="2025-11-24 01:30:28.869695744 +0000 UTC m=+1053.555761245" Nov 24 01:30:29 crc kubenswrapper[4755]: I1124 01:30:29.858214 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"d3b1705c-2c0d-4bd6-b928-87a6a105cb4d","Type":"ContainerStarted","Data":"8f0172f01ea858346468ccdb613e292db97a443d42009b742ae391ab526e2729"} Nov 24 01:30:29 crc kubenswrapper[4755]: I1124 01:30:29.859095 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Nov 24 01:30:29 crc kubenswrapper[4755]: I1124 01:30:29.863187 4755 generic.go:334] "Generic (PLEG): container finished" podID="7edf21cd-ec8b-4e53-a497-7f43ee38a3b5" containerID="9aa9702f004acad4acc81e76911e9d208c532711de72f9610cecb4db02d638f1" exitCode=0 Nov 24 01:30:29 crc kubenswrapper[4755]: I1124 01:30:29.863223 4755 generic.go:334] "Generic (PLEG): container finished" podID="7edf21cd-ec8b-4e53-a497-7f43ee38a3b5" containerID="ae1121152a0a17430a6b177a97e656b88ec364d5e4c060997adec2351e26e25f" exitCode=2 Nov 24 01:30:29 crc kubenswrapper[4755]: I1124 01:30:29.863235 4755 generic.go:334] "Generic (PLEG): container finished" podID="7edf21cd-ec8b-4e53-a497-7f43ee38a3b5" containerID="39f15efc5f594d37218e7b0f4ee3feab016294c89479d36c4e9a59c8c87abe32" exitCode=0 Nov 24 01:30:29 crc kubenswrapper[4755]: I1124 01:30:29.863263 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7edf21cd-ec8b-4e53-a497-7f43ee38a3b5","Type":"ContainerDied","Data":"9aa9702f004acad4acc81e76911e9d208c532711de72f9610cecb4db02d638f1"} Nov 24 01:30:29 crc kubenswrapper[4755]: I1124 01:30:29.863296 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7edf21cd-ec8b-4e53-a497-7f43ee38a3b5","Type":"ContainerDied","Data":"ae1121152a0a17430a6b177a97e656b88ec364d5e4c060997adec2351e26e25f"} Nov 24 01:30:29 crc kubenswrapper[4755]: I1124 01:30:29.863310 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7edf21cd-ec8b-4e53-a497-7f43ee38a3b5","Type":"ContainerDied","Data":"39f15efc5f594d37218e7b0f4ee3feab016294c89479d36c4e9a59c8c87abe32"} Nov 24 01:30:29 crc kubenswrapper[4755]: I1124 01:30:29.891796 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.891771153 podStartE2EDuration="2.891771153s" podCreationTimestamp="2025-11-24 01:30:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:30:29.889005867 +0000 UTC m=+1054.575071438" watchObservedRunningTime="2025-11-24 01:30:29.891771153 +0000 UTC m=+1054.577836684" Nov 24 01:30:32 crc kubenswrapper[4755]: I1124 01:30:32.347982 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 24 01:30:32 crc kubenswrapper[4755]: I1124 01:30:32.472780 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w6mzr\" (UniqueName: \"kubernetes.io/projected/7edf21cd-ec8b-4e53-a497-7f43ee38a3b5-kube-api-access-w6mzr\") pod \"7edf21cd-ec8b-4e53-a497-7f43ee38a3b5\" (UID: \"7edf21cd-ec8b-4e53-a497-7f43ee38a3b5\") " Nov 24 01:30:32 crc kubenswrapper[4755]: I1124 01:30:32.472933 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7edf21cd-ec8b-4e53-a497-7f43ee38a3b5-run-httpd\") pod \"7edf21cd-ec8b-4e53-a497-7f43ee38a3b5\" (UID: \"7edf21cd-ec8b-4e53-a497-7f43ee38a3b5\") " Nov 24 01:30:32 crc kubenswrapper[4755]: I1124 01:30:32.473094 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7edf21cd-ec8b-4e53-a497-7f43ee38a3b5-config-data\") pod \"7edf21cd-ec8b-4e53-a497-7f43ee38a3b5\" (UID: \"7edf21cd-ec8b-4e53-a497-7f43ee38a3b5\") " Nov 24 01:30:32 crc kubenswrapper[4755]: I1124 01:30:32.473135 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7edf21cd-ec8b-4e53-a497-7f43ee38a3b5-sg-core-conf-yaml\") pod \"7edf21cd-ec8b-4e53-a497-7f43ee38a3b5\" (UID: \"7edf21cd-ec8b-4e53-a497-7f43ee38a3b5\") " Nov 24 01:30:32 crc kubenswrapper[4755]: I1124 01:30:32.473159 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7edf21cd-ec8b-4e53-a497-7f43ee38a3b5-scripts\") pod \"7edf21cd-ec8b-4e53-a497-7f43ee38a3b5\" (UID: \"7edf21cd-ec8b-4e53-a497-7f43ee38a3b5\") " Nov 24 01:30:32 crc kubenswrapper[4755]: I1124 01:30:32.473226 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7edf21cd-ec8b-4e53-a497-7f43ee38a3b5-log-httpd\") pod \"7edf21cd-ec8b-4e53-a497-7f43ee38a3b5\" (UID: \"7edf21cd-ec8b-4e53-a497-7f43ee38a3b5\") " Nov 24 01:30:32 crc kubenswrapper[4755]: I1124 01:30:32.473249 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7edf21cd-ec8b-4e53-a497-7f43ee38a3b5-combined-ca-bundle\") pod \"7edf21cd-ec8b-4e53-a497-7f43ee38a3b5\" (UID: \"7edf21cd-ec8b-4e53-a497-7f43ee38a3b5\") " Nov 24 01:30:32 crc kubenswrapper[4755]: I1124 01:30:32.473375 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7edf21cd-ec8b-4e53-a497-7f43ee38a3b5-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "7edf21cd-ec8b-4e53-a497-7f43ee38a3b5" (UID: "7edf21cd-ec8b-4e53-a497-7f43ee38a3b5"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:30:32 crc kubenswrapper[4755]: I1124 01:30:32.473793 4755 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7edf21cd-ec8b-4e53-a497-7f43ee38a3b5-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:32 crc kubenswrapper[4755]: I1124 01:30:32.473895 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7edf21cd-ec8b-4e53-a497-7f43ee38a3b5-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "7edf21cd-ec8b-4e53-a497-7f43ee38a3b5" (UID: "7edf21cd-ec8b-4e53-a497-7f43ee38a3b5"). 
InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:30:32 crc kubenswrapper[4755]: I1124 01:30:32.478488 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7edf21cd-ec8b-4e53-a497-7f43ee38a3b5-kube-api-access-w6mzr" (OuterVolumeSpecName: "kube-api-access-w6mzr") pod "7edf21cd-ec8b-4e53-a497-7f43ee38a3b5" (UID: "7edf21cd-ec8b-4e53-a497-7f43ee38a3b5"). InnerVolumeSpecName "kube-api-access-w6mzr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:30:32 crc kubenswrapper[4755]: I1124 01:30:32.479627 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7edf21cd-ec8b-4e53-a497-7f43ee38a3b5-scripts" (OuterVolumeSpecName: "scripts") pod "7edf21cd-ec8b-4e53-a497-7f43ee38a3b5" (UID: "7edf21cd-ec8b-4e53-a497-7f43ee38a3b5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:30:32 crc kubenswrapper[4755]: I1124 01:30:32.498683 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7edf21cd-ec8b-4e53-a497-7f43ee38a3b5-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "7edf21cd-ec8b-4e53-a497-7f43ee38a3b5" (UID: "7edf21cd-ec8b-4e53-a497-7f43ee38a3b5"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:30:32 crc kubenswrapper[4755]: I1124 01:30:32.573789 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7edf21cd-ec8b-4e53-a497-7f43ee38a3b5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7edf21cd-ec8b-4e53-a497-7f43ee38a3b5" (UID: "7edf21cd-ec8b-4e53-a497-7f43ee38a3b5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:30:32 crc kubenswrapper[4755]: I1124 01:30:32.575027 4755 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7edf21cd-ec8b-4e53-a497-7f43ee38a3b5-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:32 crc kubenswrapper[4755]: I1124 01:30:32.575058 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7edf21cd-ec8b-4e53-a497-7f43ee38a3b5-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:32 crc kubenswrapper[4755]: I1124 01:30:32.575071 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7edf21cd-ec8b-4e53-a497-7f43ee38a3b5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:32 crc kubenswrapper[4755]: I1124 01:30:32.575084 4755 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7edf21cd-ec8b-4e53-a497-7f43ee38a3b5-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:32 crc kubenswrapper[4755]: I1124 01:30:32.575096 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w6mzr\" (UniqueName: \"kubernetes.io/projected/7edf21cd-ec8b-4e53-a497-7f43ee38a3b5-kube-api-access-w6mzr\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:32 crc kubenswrapper[4755]: I1124 01:30:32.593375 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7edf21cd-ec8b-4e53-a497-7f43ee38a3b5-config-data" (OuterVolumeSpecName: "config-data") pod "7edf21cd-ec8b-4e53-a497-7f43ee38a3b5" (UID: "7edf21cd-ec8b-4e53-a497-7f43ee38a3b5"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:30:32 crc kubenswrapper[4755]: I1124 01:30:32.676831 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7edf21cd-ec8b-4e53-a497-7f43ee38a3b5-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:32 crc kubenswrapper[4755]: I1124 01:30:32.895933 4755 generic.go:334] "Generic (PLEG): container finished" podID="7edf21cd-ec8b-4e53-a497-7f43ee38a3b5" containerID="a66ec590cf789c61a058125b88b0989163d216a084ea4d1ca659f92e51c21a52" exitCode=0 Nov 24 01:30:32 crc kubenswrapper[4755]: I1124 01:30:32.896015 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 24 01:30:32 crc kubenswrapper[4755]: I1124 01:30:32.896014 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7edf21cd-ec8b-4e53-a497-7f43ee38a3b5","Type":"ContainerDied","Data":"a66ec590cf789c61a058125b88b0989163d216a084ea4d1ca659f92e51c21a52"} Nov 24 01:30:32 crc kubenswrapper[4755]: I1124 01:30:32.896640 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7edf21cd-ec8b-4e53-a497-7f43ee38a3b5","Type":"ContainerDied","Data":"b3f39d1c39d96853eb2684fb0dfe516e6e4820d9ac769d21f5306759df436ef9"} Nov 24 01:30:32 crc kubenswrapper[4755]: I1124 01:30:32.896665 4755 scope.go:117] "RemoveContainer" containerID="9aa9702f004acad4acc81e76911e9d208c532711de72f9610cecb4db02d638f1" Nov 24 01:30:32 crc kubenswrapper[4755]: I1124 01:30:32.936712 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 24 01:30:32 crc kubenswrapper[4755]: I1124 01:30:32.939898 4755 scope.go:117] "RemoveContainer" containerID="ae1121152a0a17430a6b177a97e656b88ec364d5e4c060997adec2351e26e25f" Nov 24 01:30:32 crc kubenswrapper[4755]: I1124 01:30:32.955404 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 24 01:30:32 crc kubenswrapper[4755]: I1124 01:30:32.968020 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 24 01:30:32 crc kubenswrapper[4755]: E1124 01:30:32.968710 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7edf21cd-ec8b-4e53-a497-7f43ee38a3b5" containerName="proxy-httpd" Nov 24 01:30:32 crc kubenswrapper[4755]: I1124 01:30:32.968842 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="7edf21cd-ec8b-4e53-a497-7f43ee38a3b5" containerName="proxy-httpd" Nov 24 01:30:32 crc kubenswrapper[4755]: E1124 01:30:32.968988 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7edf21cd-ec8b-4e53-a497-7f43ee38a3b5" containerName="ceilometer-notification-agent" Nov 24 01:30:32 crc kubenswrapper[4755]: I1124 01:30:32.969077 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="7edf21cd-ec8b-4e53-a497-7f43ee38a3b5" containerName="ceilometer-notification-agent" Nov 24 01:30:32 crc kubenswrapper[4755]: E1124 01:30:32.969165 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7edf21cd-ec8b-4e53-a497-7f43ee38a3b5" containerName="ceilometer-central-agent" Nov 24 01:30:32 crc kubenswrapper[4755]: I1124 01:30:32.969254 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="7edf21cd-ec8b-4e53-a497-7f43ee38a3b5" containerName="ceilometer-central-agent" Nov 24 01:30:32 crc kubenswrapper[4755]: E1124 01:30:32.969350 4755 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="7edf21cd-ec8b-4e53-a497-7f43ee38a3b5" containerName="sg-core" Nov 24 01:30:32 crc kubenswrapper[4755]: I1124 01:30:32.969550 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="7edf21cd-ec8b-4e53-a497-7f43ee38a3b5" containerName="sg-core" Nov 24 01:30:32 crc kubenswrapper[4755]: I1124 01:30:32.969959 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="7edf21cd-ec8b-4e53-a497-7f43ee38a3b5" containerName="proxy-httpd" Nov 24 01:30:32 crc kubenswrapper[4755]: I1124 01:30:32.970090 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="7edf21cd-ec8b-4e53-a497-7f43ee38a3b5" containerName="ceilometer-notification-agent" Nov 24 01:30:32 crc kubenswrapper[4755]: I1124 01:30:32.970411 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="7edf21cd-ec8b-4e53-a497-7f43ee38a3b5" containerName="sg-core" Nov 24 01:30:32 crc kubenswrapper[4755]: I1124 01:30:32.970561 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="7edf21cd-ec8b-4e53-a497-7f43ee38a3b5" containerName="ceilometer-central-agent" Nov 24 01:30:32 crc kubenswrapper[4755]: I1124 01:30:32.991249 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 24 01:30:32 crc kubenswrapper[4755]: I1124 01:30:32.996981 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.002298 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.019037 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.034453 4755 scope.go:117] "RemoveContainer" containerID="39f15efc5f594d37218e7b0f4ee3feab016294c89479d36c4e9a59c8c87abe32" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.063284 4755 scope.go:117] "RemoveContainer" containerID="a66ec590cf789c61a058125b88b0989163d216a084ea4d1ca659f92e51c21a52" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.084310 4755 scope.go:117] "RemoveContainer" containerID="9aa9702f004acad4acc81e76911e9d208c532711de72f9610cecb4db02d638f1" Nov 24 01:30:33 crc kubenswrapper[4755]: E1124 01:30:33.084639 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9aa9702f004acad4acc81e76911e9d208c532711de72f9610cecb4db02d638f1\": container with ID starting with 9aa9702f004acad4acc81e76911e9d208c532711de72f9610cecb4db02d638f1 not found: ID does not exist" containerID="9aa9702f004acad4acc81e76911e9d208c532711de72f9610cecb4db02d638f1" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.084677 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9aa9702f004acad4acc81e76911e9d208c532711de72f9610cecb4db02d638f1"} err="failed to get container status \"9aa9702f004acad4acc81e76911e9d208c532711de72f9610cecb4db02d638f1\": rpc error: code = NotFound desc = could not find container \"9aa9702f004acad4acc81e76911e9d208c532711de72f9610cecb4db02d638f1\": container with ID starting with 9aa9702f004acad4acc81e76911e9d208c532711de72f9610cecb4db02d638f1 not found: ID does not exist" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.084701 4755 scope.go:117] "RemoveContainer" containerID="ae1121152a0a17430a6b177a97e656b88ec364d5e4c060997adec2351e26e25f" Nov 24 01:30:33 crc kubenswrapper[4755]: E1124 
01:30:33.085142 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae1121152a0a17430a6b177a97e656b88ec364d5e4c060997adec2351e26e25f\": container with ID starting with ae1121152a0a17430a6b177a97e656b88ec364d5e4c060997adec2351e26e25f not found: ID does not exist" containerID="ae1121152a0a17430a6b177a97e656b88ec364d5e4c060997adec2351e26e25f" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.085167 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae1121152a0a17430a6b177a97e656b88ec364d5e4c060997adec2351e26e25f"} err="failed to get container status \"ae1121152a0a17430a6b177a97e656b88ec364d5e4c060997adec2351e26e25f\": rpc error: code = NotFound desc = could not find container \"ae1121152a0a17430a6b177a97e656b88ec364d5e4c060997adec2351e26e25f\": container with ID starting with ae1121152a0a17430a6b177a97e656b88ec364d5e4c060997adec2351e26e25f not found: ID does not exist" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.085184 4755 scope.go:117] "RemoveContainer" containerID="39f15efc5f594d37218e7b0f4ee3feab016294c89479d36c4e9a59c8c87abe32" Nov 24 01:30:33 crc kubenswrapper[4755]: E1124 01:30:33.085616 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"39f15efc5f594d37218e7b0f4ee3feab016294c89479d36c4e9a59c8c87abe32\": container with ID starting with 39f15efc5f594d37218e7b0f4ee3feab016294c89479d36c4e9a59c8c87abe32 not found: ID does not exist" containerID="39f15efc5f594d37218e7b0f4ee3feab016294c89479d36c4e9a59c8c87abe32" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.085641 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"39f15efc5f594d37218e7b0f4ee3feab016294c89479d36c4e9a59c8c87abe32"} err="failed to get container status \"39f15efc5f594d37218e7b0f4ee3feab016294c89479d36c4e9a59c8c87abe32\": rpc error: code = NotFound desc = could not find container \"39f15efc5f594d37218e7b0f4ee3feab016294c89479d36c4e9a59c8c87abe32\": container with ID starting with 39f15efc5f594d37218e7b0f4ee3feab016294c89479d36c4e9a59c8c87abe32 not found: ID does not exist" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.085656 4755 scope.go:117] "RemoveContainer" containerID="a66ec590cf789c61a058125b88b0989163d216a084ea4d1ca659f92e51c21a52" Nov 24 01:30:33 crc kubenswrapper[4755]: E1124 01:30:33.085833 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a66ec590cf789c61a058125b88b0989163d216a084ea4d1ca659f92e51c21a52\": container with ID starting with a66ec590cf789c61a058125b88b0989163d216a084ea4d1ca659f92e51c21a52 not found: ID does not exist" containerID="a66ec590cf789c61a058125b88b0989163d216a084ea4d1ca659f92e51c21a52" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.085855 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a66ec590cf789c61a058125b88b0989163d216a084ea4d1ca659f92e51c21a52"} err="failed to get container status \"a66ec590cf789c61a058125b88b0989163d216a084ea4d1ca659f92e51c21a52\": rpc error: code = NotFound desc = could not find container \"a66ec590cf789c61a058125b88b0989163d216a084ea4d1ca659f92e51c21a52\": container with ID starting with a66ec590cf789c61a058125b88b0989163d216a084ea4d1ca659f92e51c21a52 not found: ID does not exist" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.091967 4755 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5de632b3-5130-4cd7-95e5-dbfde5d6738a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5de632b3-5130-4cd7-95e5-dbfde5d6738a\") " pod="openstack/ceilometer-0" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.092005 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-czx2h\" (UniqueName: \"kubernetes.io/projected/5de632b3-5130-4cd7-95e5-dbfde5d6738a-kube-api-access-czx2h\") pod \"ceilometer-0\" (UID: \"5de632b3-5130-4cd7-95e5-dbfde5d6738a\") " pod="openstack/ceilometer-0" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.092033 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5de632b3-5130-4cd7-95e5-dbfde5d6738a-log-httpd\") pod \"ceilometer-0\" (UID: \"5de632b3-5130-4cd7-95e5-dbfde5d6738a\") " pod="openstack/ceilometer-0" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.092096 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5de632b3-5130-4cd7-95e5-dbfde5d6738a-config-data\") pod \"ceilometer-0\" (UID: \"5de632b3-5130-4cd7-95e5-dbfde5d6738a\") " pod="openstack/ceilometer-0" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.092118 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5de632b3-5130-4cd7-95e5-dbfde5d6738a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5de632b3-5130-4cd7-95e5-dbfde5d6738a\") " pod="openstack/ceilometer-0" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.092188 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5de632b3-5130-4cd7-95e5-dbfde5d6738a-run-httpd\") pod \"ceilometer-0\" (UID: \"5de632b3-5130-4cd7-95e5-dbfde5d6738a\") " pod="openstack/ceilometer-0" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.092214 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5de632b3-5130-4cd7-95e5-dbfde5d6738a-scripts\") pod \"ceilometer-0\" (UID: \"5de632b3-5130-4cd7-95e5-dbfde5d6738a\") " pod="openstack/ceilometer-0" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.193776 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-czx2h\" (UniqueName: \"kubernetes.io/projected/5de632b3-5130-4cd7-95e5-dbfde5d6738a-kube-api-access-czx2h\") pod \"ceilometer-0\" (UID: \"5de632b3-5130-4cd7-95e5-dbfde5d6738a\") " pod="openstack/ceilometer-0" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.193822 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5de632b3-5130-4cd7-95e5-dbfde5d6738a-log-httpd\") pod \"ceilometer-0\" (UID: \"5de632b3-5130-4cd7-95e5-dbfde5d6738a\") " pod="openstack/ceilometer-0" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.193912 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5de632b3-5130-4cd7-95e5-dbfde5d6738a-config-data\") pod \"ceilometer-0\" (UID: 
\"5de632b3-5130-4cd7-95e5-dbfde5d6738a\") " pod="openstack/ceilometer-0" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.193937 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5de632b3-5130-4cd7-95e5-dbfde5d6738a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5de632b3-5130-4cd7-95e5-dbfde5d6738a\") " pod="openstack/ceilometer-0" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.194055 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5de632b3-5130-4cd7-95e5-dbfde5d6738a-run-httpd\") pod \"ceilometer-0\" (UID: \"5de632b3-5130-4cd7-95e5-dbfde5d6738a\") " pod="openstack/ceilometer-0" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.194083 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5de632b3-5130-4cd7-95e5-dbfde5d6738a-scripts\") pod \"ceilometer-0\" (UID: \"5de632b3-5130-4cd7-95e5-dbfde5d6738a\") " pod="openstack/ceilometer-0" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.194183 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5de632b3-5130-4cd7-95e5-dbfde5d6738a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5de632b3-5130-4cd7-95e5-dbfde5d6738a\") " pod="openstack/ceilometer-0" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.194327 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5de632b3-5130-4cd7-95e5-dbfde5d6738a-log-httpd\") pod \"ceilometer-0\" (UID: \"5de632b3-5130-4cd7-95e5-dbfde5d6738a\") " pod="openstack/ceilometer-0" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.195320 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5de632b3-5130-4cd7-95e5-dbfde5d6738a-run-httpd\") pod \"ceilometer-0\" (UID: \"5de632b3-5130-4cd7-95e5-dbfde5d6738a\") " pod="openstack/ceilometer-0" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.199281 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5de632b3-5130-4cd7-95e5-dbfde5d6738a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5de632b3-5130-4cd7-95e5-dbfde5d6738a\") " pod="openstack/ceilometer-0" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.199989 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5de632b3-5130-4cd7-95e5-dbfde5d6738a-scripts\") pod \"ceilometer-0\" (UID: \"5de632b3-5130-4cd7-95e5-dbfde5d6738a\") " pod="openstack/ceilometer-0" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.200527 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5de632b3-5130-4cd7-95e5-dbfde5d6738a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5de632b3-5130-4cd7-95e5-dbfde5d6738a\") " pod="openstack/ceilometer-0" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.211738 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-czx2h\" (UniqueName: \"kubernetes.io/projected/5de632b3-5130-4cd7-95e5-dbfde5d6738a-kube-api-access-czx2h\") pod \"ceilometer-0\" (UID: \"5de632b3-5130-4cd7-95e5-dbfde5d6738a\") " 
pod="openstack/ceilometer-0" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.216273 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5de632b3-5130-4cd7-95e5-dbfde5d6738a-config-data\") pod \"ceilometer-0\" (UID: \"5de632b3-5130-4cd7-95e5-dbfde5d6738a\") " pod="openstack/ceilometer-0" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.282478 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.352795 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.715061 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-xrqj7"] Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.716736 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-xrqj7" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.720007 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.720276 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.725990 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-xrqj7"] Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.819195 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c211ce7-93af-43de-ab63-044b93a27473-scripts\") pod \"nova-cell0-cell-mapping-xrqj7\" (UID: \"6c211ce7-93af-43de-ab63-044b93a27473\") " pod="openstack/nova-cell0-cell-mapping-xrqj7" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.819378 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c211ce7-93af-43de-ab63-044b93a27473-config-data\") pod \"nova-cell0-cell-mapping-xrqj7\" (UID: \"6c211ce7-93af-43de-ab63-044b93a27473\") " pod="openstack/nova-cell0-cell-mapping-xrqj7" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.819434 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c211ce7-93af-43de-ab63-044b93a27473-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-xrqj7\" (UID: \"6c211ce7-93af-43de-ab63-044b93a27473\") " pod="openstack/nova-cell0-cell-mapping-xrqj7" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.819670 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-snnwj\" (UniqueName: \"kubernetes.io/projected/6c211ce7-93af-43de-ab63-044b93a27473-kube-api-access-snnwj\") pod \"nova-cell0-cell-mapping-xrqj7\" (UID: \"6c211ce7-93af-43de-ab63-044b93a27473\") " pod="openstack/nova-cell0-cell-mapping-xrqj7" Nov 24 01:30:33 crc kubenswrapper[4755]: W1124 01:30:33.820726 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5de632b3_5130_4cd7_95e5_dbfde5d6738a.slice/crio-ba3b312866cfaf2929e718a5a0c21e243b8cf6cab7e42ad56ef9298fb8340127 WatchSource:0}: Error finding container 
ba3b312866cfaf2929e718a5a0c21e243b8cf6cab7e42ad56ef9298fb8340127: Status 404 returned error can't find the container with id ba3b312866cfaf2929e718a5a0c21e243b8cf6cab7e42ad56ef9298fb8340127 Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.823537 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.876777 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.879305 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.884531 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.889856 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.921910 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c211ce7-93af-43de-ab63-044b93a27473-scripts\") pod \"nova-cell0-cell-mapping-xrqj7\" (UID: \"6c211ce7-93af-43de-ab63-044b93a27473\") " pod="openstack/nova-cell0-cell-mapping-xrqj7" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.921961 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc9a95c5-01c8-4034-a79e-0d1714219a17-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"fc9a95c5-01c8-4034-a79e-0d1714219a17\") " pod="openstack/nova-scheduler-0" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.921994 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dwsnt\" (UniqueName: \"kubernetes.io/projected/fc9a95c5-01c8-4034-a79e-0d1714219a17-kube-api-access-dwsnt\") pod \"nova-scheduler-0\" (UID: \"fc9a95c5-01c8-4034-a79e-0d1714219a17\") " pod="openstack/nova-scheduler-0" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.922026 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c211ce7-93af-43de-ab63-044b93a27473-config-data\") pod \"nova-cell0-cell-mapping-xrqj7\" (UID: \"6c211ce7-93af-43de-ab63-044b93a27473\") " pod="openstack/nova-cell0-cell-mapping-xrqj7" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.922047 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc9a95c5-01c8-4034-a79e-0d1714219a17-config-data\") pod \"nova-scheduler-0\" (UID: \"fc9a95c5-01c8-4034-a79e-0d1714219a17\") " pod="openstack/nova-scheduler-0" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.922085 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c211ce7-93af-43de-ab63-044b93a27473-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-xrqj7\" (UID: \"6c211ce7-93af-43de-ab63-044b93a27473\") " pod="openstack/nova-cell0-cell-mapping-xrqj7" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.922119 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-snnwj\" (UniqueName: 
\"kubernetes.io/projected/6c211ce7-93af-43de-ab63-044b93a27473-kube-api-access-snnwj\") pod \"nova-cell0-cell-mapping-xrqj7\" (UID: \"6c211ce7-93af-43de-ab63-044b93a27473\") " pod="openstack/nova-cell0-cell-mapping-xrqj7" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.928688 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5de632b3-5130-4cd7-95e5-dbfde5d6738a","Type":"ContainerStarted","Data":"ba3b312866cfaf2929e718a5a0c21e243b8cf6cab7e42ad56ef9298fb8340127"} Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.929119 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c211ce7-93af-43de-ab63-044b93a27473-scripts\") pod \"nova-cell0-cell-mapping-xrqj7\" (UID: \"6c211ce7-93af-43de-ab63-044b93a27473\") " pod="openstack/nova-cell0-cell-mapping-xrqj7" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.938533 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c211ce7-93af-43de-ab63-044b93a27473-config-data\") pod \"nova-cell0-cell-mapping-xrqj7\" (UID: \"6c211ce7-93af-43de-ab63-044b93a27473\") " pod="openstack/nova-cell0-cell-mapping-xrqj7" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.939793 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c211ce7-93af-43de-ab63-044b93a27473-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-xrqj7\" (UID: \"6c211ce7-93af-43de-ab63-044b93a27473\") " pod="openstack/nova-cell0-cell-mapping-xrqj7" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.954452 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-snnwj\" (UniqueName: \"kubernetes.io/projected/6c211ce7-93af-43de-ab63-044b93a27473-kube-api-access-snnwj\") pod \"nova-cell0-cell-mapping-xrqj7\" (UID: \"6c211ce7-93af-43de-ab63-044b93a27473\") " pod="openstack/nova-cell0-cell-mapping-xrqj7" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.962741 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.965702 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.971878 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.985010 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.986576 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 24 01:30:33 crc kubenswrapper[4755]: I1124 01:30:33.989347 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:33.999173 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.035943 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc9a95c5-01c8-4034-a79e-0d1714219a17-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"fc9a95c5-01c8-4034-a79e-0d1714219a17\") " pod="openstack/nova-scheduler-0" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.036015 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/994b82d0-9135-4cdb-89d8-b772fe5ba79c-config-data\") pod \"nova-metadata-0\" (UID: \"994b82d0-9135-4cdb-89d8-b772fe5ba79c\") " pod="openstack/nova-metadata-0" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.036052 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ct9vr\" (UniqueName: \"kubernetes.io/projected/f1d611ec-f141-4525-b307-89e15d639897-kube-api-access-ct9vr\") pod \"nova-api-0\" (UID: \"f1d611ec-f141-4525-b307-89e15d639897\") " pod="openstack/nova-api-0" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.036073 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dwsnt\" (UniqueName: \"kubernetes.io/projected/fc9a95c5-01c8-4034-a79e-0d1714219a17-kube-api-access-dwsnt\") pod \"nova-scheduler-0\" (UID: \"fc9a95c5-01c8-4034-a79e-0d1714219a17\") " pod="openstack/nova-scheduler-0" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.036098 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/994b82d0-9135-4cdb-89d8-b772fe5ba79c-logs\") pod \"nova-metadata-0\" (UID: \"994b82d0-9135-4cdb-89d8-b772fe5ba79c\") " pod="openstack/nova-metadata-0" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.036129 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1d611ec-f141-4525-b307-89e15d639897-config-data\") pod \"nova-api-0\" (UID: \"f1d611ec-f141-4525-b307-89e15d639897\") " pod="openstack/nova-api-0" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.036177 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc9a95c5-01c8-4034-a79e-0d1714219a17-config-data\") pod \"nova-scheduler-0\" (UID: \"fc9a95c5-01c8-4034-a79e-0d1714219a17\") " pod="openstack/nova-scheduler-0" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.036313 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/994b82d0-9135-4cdb-89d8-b772fe5ba79c-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"994b82d0-9135-4cdb-89d8-b772fe5ba79c\") " pod="openstack/nova-metadata-0" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.036351 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/f1d611ec-f141-4525-b307-89e15d639897-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f1d611ec-f141-4525-b307-89e15d639897\") " pod="openstack/nova-api-0" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.036463 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8sc6\" (UniqueName: \"kubernetes.io/projected/994b82d0-9135-4cdb-89d8-b772fe5ba79c-kube-api-access-w8sc6\") pod \"nova-metadata-0\" (UID: \"994b82d0-9135-4cdb-89d8-b772fe5ba79c\") " pod="openstack/nova-metadata-0" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.036500 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f1d611ec-f141-4525-b307-89e15d639897-logs\") pod \"nova-api-0\" (UID: \"f1d611ec-f141-4525-b307-89e15d639897\") " pod="openstack/nova-api-0" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.042521 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7edf21cd-ec8b-4e53-a497-7f43ee38a3b5" path="/var/lib/kubelet/pods/7edf21cd-ec8b-4e53-a497-7f43ee38a3b5/volumes" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.043514 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.046188 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc9a95c5-01c8-4034-a79e-0d1714219a17-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"fc9a95c5-01c8-4034-a79e-0d1714219a17\") " pod="openstack/nova-scheduler-0" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.056230 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc9a95c5-01c8-4034-a79e-0d1714219a17-config-data\") pod \"nova-scheduler-0\" (UID: \"fc9a95c5-01c8-4034-a79e-0d1714219a17\") " pod="openstack/nova-scheduler-0" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.071632 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-xrqj7" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.072534 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dwsnt\" (UniqueName: \"kubernetes.io/projected/fc9a95c5-01c8-4034-a79e-0d1714219a17-kube-api-access-dwsnt\") pod \"nova-scheduler-0\" (UID: \"fc9a95c5-01c8-4034-a79e-0d1714219a17\") " pod="openstack/nova-scheduler-0" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.137781 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8sc6\" (UniqueName: \"kubernetes.io/projected/994b82d0-9135-4cdb-89d8-b772fe5ba79c-kube-api-access-w8sc6\") pod \"nova-metadata-0\" (UID: \"994b82d0-9135-4cdb-89d8-b772fe5ba79c\") " pod="openstack/nova-metadata-0" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.137824 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f1d611ec-f141-4525-b307-89e15d639897-logs\") pod \"nova-api-0\" (UID: \"f1d611ec-f141-4525-b307-89e15d639897\") " pod="openstack/nova-api-0" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.137897 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/994b82d0-9135-4cdb-89d8-b772fe5ba79c-config-data\") pod \"nova-metadata-0\" (UID: \"994b82d0-9135-4cdb-89d8-b772fe5ba79c\") " pod="openstack/nova-metadata-0" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.137922 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ct9vr\" (UniqueName: \"kubernetes.io/projected/f1d611ec-f141-4525-b307-89e15d639897-kube-api-access-ct9vr\") pod \"nova-api-0\" (UID: \"f1d611ec-f141-4525-b307-89e15d639897\") " pod="openstack/nova-api-0" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.137939 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/994b82d0-9135-4cdb-89d8-b772fe5ba79c-logs\") pod \"nova-metadata-0\" (UID: \"994b82d0-9135-4cdb-89d8-b772fe5ba79c\") " pod="openstack/nova-metadata-0" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.137961 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1d611ec-f141-4525-b307-89e15d639897-config-data\") pod \"nova-api-0\" (UID: \"f1d611ec-f141-4525-b307-89e15d639897\") " pod="openstack/nova-api-0" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.138030 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/994b82d0-9135-4cdb-89d8-b772fe5ba79c-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"994b82d0-9135-4cdb-89d8-b772fe5ba79c\") " pod="openstack/nova-metadata-0" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.138047 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1d611ec-f141-4525-b307-89e15d639897-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f1d611ec-f141-4525-b307-89e15d639897\") " pod="openstack/nova-api-0" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.143207 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f1d611ec-f141-4525-b307-89e15d639897-logs\") pod \"nova-api-0\" (UID: 
\"f1d611ec-f141-4525-b307-89e15d639897\") " pod="openstack/nova-api-0" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.157521 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1d611ec-f141-4525-b307-89e15d639897-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f1d611ec-f141-4525-b307-89e15d639897\") " pod="openstack/nova-api-0" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.160205 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/994b82d0-9135-4cdb-89d8-b772fe5ba79c-logs\") pod \"nova-metadata-0\" (UID: \"994b82d0-9135-4cdb-89d8-b772fe5ba79c\") " pod="openstack/nova-metadata-0" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.162654 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/994b82d0-9135-4cdb-89d8-b772fe5ba79c-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"994b82d0-9135-4cdb-89d8-b772fe5ba79c\") " pod="openstack/nova-metadata-0" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.169466 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/994b82d0-9135-4cdb-89d8-b772fe5ba79c-config-data\") pod \"nova-metadata-0\" (UID: \"994b82d0-9135-4cdb-89d8-b772fe5ba79c\") " pod="openstack/nova-metadata-0" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.170085 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1d611ec-f141-4525-b307-89e15d639897-config-data\") pod \"nova-api-0\" (UID: \"f1d611ec-f141-4525-b307-89e15d639897\") " pod="openstack/nova-api-0" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.179215 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.180488 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.183096 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.200923 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w8sc6\" (UniqueName: \"kubernetes.io/projected/994b82d0-9135-4cdb-89d8-b772fe5ba79c-kube-api-access-w8sc6\") pod \"nova-metadata-0\" (UID: \"994b82d0-9135-4cdb-89d8-b772fe5ba79c\") " pod="openstack/nova-metadata-0" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.201649 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ct9vr\" (UniqueName: \"kubernetes.io/projected/f1d611ec-f141-4525-b307-89e15d639897-kube-api-access-ct9vr\") pod \"nova-api-0\" (UID: \"f1d611ec-f141-4525-b307-89e15d639897\") " pod="openstack/nova-api-0" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.201679 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.208113 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.208908 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.221193 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.221388 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-f9x2z"] Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.233693 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-f9x2z" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.238834 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-f9x2z"] Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.242250 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8647003e-d511-4e5b-9fe4-86049ef105e8-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"8647003e-d511-4e5b-9fe4-86049ef105e8\") " pod="openstack/nova-cell1-novncproxy-0" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.242665 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8647003e-d511-4e5b-9fe4-86049ef105e8-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"8647003e-d511-4e5b-9fe4-86049ef105e8\") " pod="openstack/nova-cell1-novncproxy-0" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.242887 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wgjpm\" (UniqueName: \"kubernetes.io/projected/8647003e-d511-4e5b-9fe4-86049ef105e8-kube-api-access-wgjpm\") pod \"nova-cell1-novncproxy-0\" (UID: \"8647003e-d511-4e5b-9fe4-86049ef105e8\") " pod="openstack/nova-cell1-novncproxy-0" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.344447 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/80d1eac3-fd1d-43f2-ad80-91b0910244a1-dns-svc\") pod \"dnsmasq-dns-845d6d6f59-f9x2z\" (UID: \"80d1eac3-fd1d-43f2-ad80-91b0910244a1\") " pod="openstack/dnsmasq-dns-845d6d6f59-f9x2z" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.344738 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8647003e-d511-4e5b-9fe4-86049ef105e8-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"8647003e-d511-4e5b-9fe4-86049ef105e8\") " pod="openstack/nova-cell1-novncproxy-0" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.344766 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/80d1eac3-fd1d-43f2-ad80-91b0910244a1-dns-swift-storage-0\") pod \"dnsmasq-dns-845d6d6f59-f9x2z\" (UID: \"80d1eac3-fd1d-43f2-ad80-91b0910244a1\") " pod="openstack/dnsmasq-dns-845d6d6f59-f9x2z" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.344884 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/80d1eac3-fd1d-43f2-ad80-91b0910244a1-config\") pod \"dnsmasq-dns-845d6d6f59-f9x2z\" (UID: \"80d1eac3-fd1d-43f2-ad80-91b0910244a1\") " pod="openstack/dnsmasq-dns-845d6d6f59-f9x2z" Nov 24 01:30:34 crc 
kubenswrapper[4755]: I1124 01:30:34.344954 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8647003e-d511-4e5b-9fe4-86049ef105e8-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"8647003e-d511-4e5b-9fe4-86049ef105e8\") " pod="openstack/nova-cell1-novncproxy-0" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.344973 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wgjpm\" (UniqueName: \"kubernetes.io/projected/8647003e-d511-4e5b-9fe4-86049ef105e8-kube-api-access-wgjpm\") pod \"nova-cell1-novncproxy-0\" (UID: \"8647003e-d511-4e5b-9fe4-86049ef105e8\") " pod="openstack/nova-cell1-novncproxy-0" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.345032 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-npjnb\" (UniqueName: \"kubernetes.io/projected/80d1eac3-fd1d-43f2-ad80-91b0910244a1-kube-api-access-npjnb\") pod \"dnsmasq-dns-845d6d6f59-f9x2z\" (UID: \"80d1eac3-fd1d-43f2-ad80-91b0910244a1\") " pod="openstack/dnsmasq-dns-845d6d6f59-f9x2z" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.345071 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/80d1eac3-fd1d-43f2-ad80-91b0910244a1-ovsdbserver-nb\") pod \"dnsmasq-dns-845d6d6f59-f9x2z\" (UID: \"80d1eac3-fd1d-43f2-ad80-91b0910244a1\") " pod="openstack/dnsmasq-dns-845d6d6f59-f9x2z" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.345118 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/80d1eac3-fd1d-43f2-ad80-91b0910244a1-ovsdbserver-sb\") pod \"dnsmasq-dns-845d6d6f59-f9x2z\" (UID: \"80d1eac3-fd1d-43f2-ad80-91b0910244a1\") " pod="openstack/dnsmasq-dns-845d6d6f59-f9x2z" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.350333 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8647003e-d511-4e5b-9fe4-86049ef105e8-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"8647003e-d511-4e5b-9fe4-86049ef105e8\") " pod="openstack/nova-cell1-novncproxy-0" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.350858 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8647003e-d511-4e5b-9fe4-86049ef105e8-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"8647003e-d511-4e5b-9fe4-86049ef105e8\") " pod="openstack/nova-cell1-novncproxy-0" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.375140 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wgjpm\" (UniqueName: \"kubernetes.io/projected/8647003e-d511-4e5b-9fe4-86049ef105e8-kube-api-access-wgjpm\") pod \"nova-cell1-novncproxy-0\" (UID: \"8647003e-d511-4e5b-9fe4-86049ef105e8\") " pod="openstack/nova-cell1-novncproxy-0" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.448181 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-npjnb\" (UniqueName: \"kubernetes.io/projected/80d1eac3-fd1d-43f2-ad80-91b0910244a1-kube-api-access-npjnb\") pod \"dnsmasq-dns-845d6d6f59-f9x2z\" (UID: \"80d1eac3-fd1d-43f2-ad80-91b0910244a1\") " pod="openstack/dnsmasq-dns-845d6d6f59-f9x2z" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 
01:30:34.448238 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/80d1eac3-fd1d-43f2-ad80-91b0910244a1-ovsdbserver-nb\") pod \"dnsmasq-dns-845d6d6f59-f9x2z\" (UID: \"80d1eac3-fd1d-43f2-ad80-91b0910244a1\") " pod="openstack/dnsmasq-dns-845d6d6f59-f9x2z" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.448277 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/80d1eac3-fd1d-43f2-ad80-91b0910244a1-ovsdbserver-sb\") pod \"dnsmasq-dns-845d6d6f59-f9x2z\" (UID: \"80d1eac3-fd1d-43f2-ad80-91b0910244a1\") " pod="openstack/dnsmasq-dns-845d6d6f59-f9x2z" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.448310 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/80d1eac3-fd1d-43f2-ad80-91b0910244a1-dns-svc\") pod \"dnsmasq-dns-845d6d6f59-f9x2z\" (UID: \"80d1eac3-fd1d-43f2-ad80-91b0910244a1\") " pod="openstack/dnsmasq-dns-845d6d6f59-f9x2z" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.448335 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/80d1eac3-fd1d-43f2-ad80-91b0910244a1-dns-swift-storage-0\") pod \"dnsmasq-dns-845d6d6f59-f9x2z\" (UID: \"80d1eac3-fd1d-43f2-ad80-91b0910244a1\") " pod="openstack/dnsmasq-dns-845d6d6f59-f9x2z" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.448386 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/80d1eac3-fd1d-43f2-ad80-91b0910244a1-config\") pod \"dnsmasq-dns-845d6d6f59-f9x2z\" (UID: \"80d1eac3-fd1d-43f2-ad80-91b0910244a1\") " pod="openstack/dnsmasq-dns-845d6d6f59-f9x2z" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.449254 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/80d1eac3-fd1d-43f2-ad80-91b0910244a1-config\") pod \"dnsmasq-dns-845d6d6f59-f9x2z\" (UID: \"80d1eac3-fd1d-43f2-ad80-91b0910244a1\") " pod="openstack/dnsmasq-dns-845d6d6f59-f9x2z" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.450205 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/80d1eac3-fd1d-43f2-ad80-91b0910244a1-ovsdbserver-nb\") pod \"dnsmasq-dns-845d6d6f59-f9x2z\" (UID: \"80d1eac3-fd1d-43f2-ad80-91b0910244a1\") " pod="openstack/dnsmasq-dns-845d6d6f59-f9x2z" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.451076 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/80d1eac3-fd1d-43f2-ad80-91b0910244a1-ovsdbserver-sb\") pod \"dnsmasq-dns-845d6d6f59-f9x2z\" (UID: \"80d1eac3-fd1d-43f2-ad80-91b0910244a1\") " pod="openstack/dnsmasq-dns-845d6d6f59-f9x2z" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.459281 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/80d1eac3-fd1d-43f2-ad80-91b0910244a1-dns-swift-storage-0\") pod \"dnsmasq-dns-845d6d6f59-f9x2z\" (UID: \"80d1eac3-fd1d-43f2-ad80-91b0910244a1\") " pod="openstack/dnsmasq-dns-845d6d6f59-f9x2z" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.451595 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" 
(UniqueName: \"kubernetes.io/configmap/80d1eac3-fd1d-43f2-ad80-91b0910244a1-dns-svc\") pod \"dnsmasq-dns-845d6d6f59-f9x2z\" (UID: \"80d1eac3-fd1d-43f2-ad80-91b0910244a1\") " pod="openstack/dnsmasq-dns-845d6d6f59-f9x2z" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.477252 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-npjnb\" (UniqueName: \"kubernetes.io/projected/80d1eac3-fd1d-43f2-ad80-91b0910244a1-kube-api-access-npjnb\") pod \"dnsmasq-dns-845d6d6f59-f9x2z\" (UID: \"80d1eac3-fd1d-43f2-ad80-91b0910244a1\") " pod="openstack/dnsmasq-dns-845d6d6f59-f9x2z" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.565219 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.575821 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-f9x2z" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.778210 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-xrqj7"] Nov 24 01:30:34 crc kubenswrapper[4755]: W1124 01:30:34.888253 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf1d611ec_f141_4525_b307_89e15d639897.slice/crio-fdab63ae680e4f9e3094ab7ba8e02a1d7d29a336207a45fe5b35fba45a09b72a WatchSource:0}: Error finding container fdab63ae680e4f9e3094ab7ba8e02a1d7d29a336207a45fe5b35fba45a09b72a: Status 404 returned error can't find the container with id fdab63ae680e4f9e3094ab7ba8e02a1d7d29a336207a45fe5b35fba45a09b72a Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.893323 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.914442 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.923156 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-hggks"] Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.925597 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-hggks" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.928447 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.928802 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.937414 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-hggks"] Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.975161 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2efb1e5a-1455-4b1e-892e-ee86d4fdf50b-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-hggks\" (UID: \"2efb1e5a-1455-4b1e-892e-ee86d4fdf50b\") " pod="openstack/nova-cell1-conductor-db-sync-hggks" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.975733 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f1d611ec-f141-4525-b307-89e15d639897","Type":"ContainerStarted","Data":"fdab63ae680e4f9e3094ab7ba8e02a1d7d29a336207a45fe5b35fba45a09b72a"} Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.978200 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-blkfn\" (UniqueName: \"kubernetes.io/projected/2efb1e5a-1455-4b1e-892e-ee86d4fdf50b-kube-api-access-blkfn\") pod \"nova-cell1-conductor-db-sync-hggks\" (UID: \"2efb1e5a-1455-4b1e-892e-ee86d4fdf50b\") " pod="openstack/nova-cell1-conductor-db-sync-hggks" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.978242 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2efb1e5a-1455-4b1e-892e-ee86d4fdf50b-config-data\") pod \"nova-cell1-conductor-db-sync-hggks\" (UID: \"2efb1e5a-1455-4b1e-892e-ee86d4fdf50b\") " pod="openstack/nova-cell1-conductor-db-sync-hggks" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.978383 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2efb1e5a-1455-4b1e-892e-ee86d4fdf50b-scripts\") pod \"nova-cell1-conductor-db-sync-hggks\" (UID: \"2efb1e5a-1455-4b1e-892e-ee86d4fdf50b\") " pod="openstack/nova-cell1-conductor-db-sync-hggks" Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.979786 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"994b82d0-9135-4cdb-89d8-b772fe5ba79c","Type":"ContainerStarted","Data":"e25334ce873c61ed6960233e4f4c1aac5314434b56ab34141f35a4393a45bac5"} Nov 24 01:30:34 crc kubenswrapper[4755]: I1124 01:30:34.996428 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5de632b3-5130-4cd7-95e5-dbfde5d6738a","Type":"ContainerStarted","Data":"a72798220c2d6cabfcf4ee702fcde40865f698f9d3dc29a35135078803dc8271"} Nov 24 01:30:35 crc kubenswrapper[4755]: I1124 01:30:35.045143 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-xrqj7" event={"ID":"6c211ce7-93af-43de-ab63-044b93a27473","Type":"ContainerStarted","Data":"dae15ef328aa3cd4e4559636aff625c98d36b31d14413582aef3b8be70a63c8a"} Nov 24 01:30:35 crc kubenswrapper[4755]: I1124 01:30:35.051690 4755 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 24 01:30:35 crc kubenswrapper[4755]: W1124 01:30:35.078199 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfc9a95c5_01c8_4034_a79e_0d1714219a17.slice/crio-75f35531917fdda4a8d0efd6e08bdee10f189c74fd9fac032cc2628275ff72de WatchSource:0}: Error finding container 75f35531917fdda4a8d0efd6e08bdee10f189c74fd9fac032cc2628275ff72de: Status 404 returned error can't find the container with id 75f35531917fdda4a8d0efd6e08bdee10f189c74fd9fac032cc2628275ff72de Nov 24 01:30:35 crc kubenswrapper[4755]: I1124 01:30:35.079783 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2efb1e5a-1455-4b1e-892e-ee86d4fdf50b-scripts\") pod \"nova-cell1-conductor-db-sync-hggks\" (UID: \"2efb1e5a-1455-4b1e-892e-ee86d4fdf50b\") " pod="openstack/nova-cell1-conductor-db-sync-hggks" Nov 24 01:30:35 crc kubenswrapper[4755]: I1124 01:30:35.079940 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2efb1e5a-1455-4b1e-892e-ee86d4fdf50b-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-hggks\" (UID: \"2efb1e5a-1455-4b1e-892e-ee86d4fdf50b\") " pod="openstack/nova-cell1-conductor-db-sync-hggks" Nov 24 01:30:35 crc kubenswrapper[4755]: I1124 01:30:35.080499 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-blkfn\" (UniqueName: \"kubernetes.io/projected/2efb1e5a-1455-4b1e-892e-ee86d4fdf50b-kube-api-access-blkfn\") pod \"nova-cell1-conductor-db-sync-hggks\" (UID: \"2efb1e5a-1455-4b1e-892e-ee86d4fdf50b\") " pod="openstack/nova-cell1-conductor-db-sync-hggks" Nov 24 01:30:35 crc kubenswrapper[4755]: I1124 01:30:35.080530 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2efb1e5a-1455-4b1e-892e-ee86d4fdf50b-config-data\") pod \"nova-cell1-conductor-db-sync-hggks\" (UID: \"2efb1e5a-1455-4b1e-892e-ee86d4fdf50b\") " pod="openstack/nova-cell1-conductor-db-sync-hggks" Nov 24 01:30:35 crc kubenswrapper[4755]: I1124 01:30:35.088226 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2efb1e5a-1455-4b1e-892e-ee86d4fdf50b-scripts\") pod \"nova-cell1-conductor-db-sync-hggks\" (UID: \"2efb1e5a-1455-4b1e-892e-ee86d4fdf50b\") " pod="openstack/nova-cell1-conductor-db-sync-hggks" Nov 24 01:30:35 crc kubenswrapper[4755]: I1124 01:30:35.089357 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2efb1e5a-1455-4b1e-892e-ee86d4fdf50b-config-data\") pod \"nova-cell1-conductor-db-sync-hggks\" (UID: \"2efb1e5a-1455-4b1e-892e-ee86d4fdf50b\") " pod="openstack/nova-cell1-conductor-db-sync-hggks" Nov 24 01:30:35 crc kubenswrapper[4755]: I1124 01:30:35.091694 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2efb1e5a-1455-4b1e-892e-ee86d4fdf50b-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-hggks\" (UID: \"2efb1e5a-1455-4b1e-892e-ee86d4fdf50b\") " pod="openstack/nova-cell1-conductor-db-sync-hggks" Nov 24 01:30:35 crc kubenswrapper[4755]: I1124 01:30:35.100264 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-blkfn\" (UniqueName: 
\"kubernetes.io/projected/2efb1e5a-1455-4b1e-892e-ee86d4fdf50b-kube-api-access-blkfn\") pod \"nova-cell1-conductor-db-sync-hggks\" (UID: \"2efb1e5a-1455-4b1e-892e-ee86d4fdf50b\") " pod="openstack/nova-cell1-conductor-db-sync-hggks" Nov 24 01:30:35 crc kubenswrapper[4755]: I1124 01:30:35.165131 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 24 01:30:35 crc kubenswrapper[4755]: I1124 01:30:35.202991 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-hggks" Nov 24 01:30:35 crc kubenswrapper[4755]: I1124 01:30:35.232405 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-f9x2z"] Nov 24 01:30:35 crc kubenswrapper[4755]: I1124 01:30:35.810118 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-hggks"] Nov 24 01:30:35 crc kubenswrapper[4755]: W1124 01:30:35.817067 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2efb1e5a_1455_4b1e_892e_ee86d4fdf50b.slice/crio-2f83cd0b38de998a838f24e23147c4d0a9a1627c7e82da88a8fedbc87f03c385 WatchSource:0}: Error finding container 2f83cd0b38de998a838f24e23147c4d0a9a1627c7e82da88a8fedbc87f03c385: Status 404 returned error can't find the container with id 2f83cd0b38de998a838f24e23147c4d0a9a1627c7e82da88a8fedbc87f03c385 Nov 24 01:30:36 crc kubenswrapper[4755]: I1124 01:30:36.080786 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"fc9a95c5-01c8-4034-a79e-0d1714219a17","Type":"ContainerStarted","Data":"75f35531917fdda4a8d0efd6e08bdee10f189c74fd9fac032cc2628275ff72de"} Nov 24 01:30:36 crc kubenswrapper[4755]: I1124 01:30:36.087595 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"8647003e-d511-4e5b-9fe4-86049ef105e8","Type":"ContainerStarted","Data":"599c3ef7517e82e096d2ff4ea0221c6eaf2c25075a771bc897ee99d4d6ce2e5a"} Nov 24 01:30:36 crc kubenswrapper[4755]: I1124 01:30:36.094077 4755 generic.go:334] "Generic (PLEG): container finished" podID="80d1eac3-fd1d-43f2-ad80-91b0910244a1" containerID="a859722809f291b6b2b1455711449198c25a357ea3f20c6d871d63fd9f4d4af2" exitCode=0 Nov 24 01:30:36 crc kubenswrapper[4755]: I1124 01:30:36.094140 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-f9x2z" event={"ID":"80d1eac3-fd1d-43f2-ad80-91b0910244a1","Type":"ContainerDied","Data":"a859722809f291b6b2b1455711449198c25a357ea3f20c6d871d63fd9f4d4af2"} Nov 24 01:30:36 crc kubenswrapper[4755]: I1124 01:30:36.094167 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-f9x2z" event={"ID":"80d1eac3-fd1d-43f2-ad80-91b0910244a1","Type":"ContainerStarted","Data":"ea62ef234c0b09ee1881ae0e64545021c6ba430102f9b15eaa42f1fb6c813b1a"} Nov 24 01:30:36 crc kubenswrapper[4755]: I1124 01:30:36.098132 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5de632b3-5130-4cd7-95e5-dbfde5d6738a","Type":"ContainerStarted","Data":"2c69fe56e8872359f6380f1e23d3e7fc270b5d179950e8bbbc6322cd8955a275"} Nov 24 01:30:36 crc kubenswrapper[4755]: I1124 01:30:36.101883 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-hggks" 
event={"ID":"2efb1e5a-1455-4b1e-892e-ee86d4fdf50b","Type":"ContainerStarted","Data":"2f83cd0b38de998a838f24e23147c4d0a9a1627c7e82da88a8fedbc87f03c385"} Nov 24 01:30:36 crc kubenswrapper[4755]: I1124 01:30:36.111370 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-xrqj7" event={"ID":"6c211ce7-93af-43de-ab63-044b93a27473","Type":"ContainerStarted","Data":"7d17fdd1cec8829e48d38f4960e052fcc869568b66f24d506522812892416037"} Nov 24 01:30:36 crc kubenswrapper[4755]: I1124 01:30:36.277193 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-xrqj7" podStartSLOduration=3.277175186 podStartE2EDuration="3.277175186s" podCreationTimestamp="2025-11-24 01:30:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:30:36.256768222 +0000 UTC m=+1060.942833723" watchObservedRunningTime="2025-11-24 01:30:36.277175186 +0000 UTC m=+1060.963240687" Nov 24 01:30:37 crc kubenswrapper[4755]: I1124 01:30:37.121961 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5de632b3-5130-4cd7-95e5-dbfde5d6738a","Type":"ContainerStarted","Data":"a9465e67dc5ac2c0a71160dfc5078323846c8e6ec72d2d04aa4ed06f05875511"} Nov 24 01:30:37 crc kubenswrapper[4755]: I1124 01:30:37.123059 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-hggks" event={"ID":"2efb1e5a-1455-4b1e-892e-ee86d4fdf50b","Type":"ContainerStarted","Data":"04d269e7b1d1837f19b589cec6c6436f35986ca8e4dafe52f5060dc15a29a6fe"} Nov 24 01:30:37 crc kubenswrapper[4755]: I1124 01:30:37.124701 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-f9x2z" event={"ID":"80d1eac3-fd1d-43f2-ad80-91b0910244a1","Type":"ContainerStarted","Data":"a098cf926adcc72d49de933224c9783012efd690f6c066af693c16e3e115a5bd"} Nov 24 01:30:37 crc kubenswrapper[4755]: I1124 01:30:37.124836 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-845d6d6f59-f9x2z" Nov 24 01:30:37 crc kubenswrapper[4755]: I1124 01:30:37.146678 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-hggks" podStartSLOduration=3.146656245 podStartE2EDuration="3.146656245s" podCreationTimestamp="2025-11-24 01:30:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:30:37.135947129 +0000 UTC m=+1061.822012640" watchObservedRunningTime="2025-11-24 01:30:37.146656245 +0000 UTC m=+1061.832721746" Nov 24 01:30:37 crc kubenswrapper[4755]: I1124 01:30:37.709542 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-845d6d6f59-f9x2z" podStartSLOduration=3.709517815 podStartE2EDuration="3.709517815s" podCreationTimestamp="2025-11-24 01:30:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:30:37.161289699 +0000 UTC m=+1061.847355220" watchObservedRunningTime="2025-11-24 01:30:37.709517815 +0000 UTC m=+1062.395583326" Nov 24 01:30:37 crc kubenswrapper[4755]: I1124 01:30:37.718528 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 24 01:30:37 crc kubenswrapper[4755]: I1124 01:30:37.737226 4755 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 24 01:30:39 crc kubenswrapper[4755]: I1124 01:30:39.143794 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"8647003e-d511-4e5b-9fe4-86049ef105e8","Type":"ContainerStarted","Data":"8a6317f2863876d0418c3b1c94446feadb01e8cafddbc6ca00fbfde2853c30f2"} Nov 24 01:30:39 crc kubenswrapper[4755]: I1124 01:30:39.144339 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="8647003e-d511-4e5b-9fe4-86049ef105e8" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://8a6317f2863876d0418c3b1c94446feadb01e8cafddbc6ca00fbfde2853c30f2" gracePeriod=30 Nov 24 01:30:39 crc kubenswrapper[4755]: I1124 01:30:39.147492 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f1d611ec-f141-4525-b307-89e15d639897","Type":"ContainerStarted","Data":"8d9eb4fa0556bb9abf34f8b23b982a6329923bc219a0cde9db9216fb3a791a1b"} Nov 24 01:30:39 crc kubenswrapper[4755]: I1124 01:30:39.147652 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f1d611ec-f141-4525-b307-89e15d639897","Type":"ContainerStarted","Data":"12f3e3a0e9b5ec64865711c055ca82c8906da5c00ac1323959d15688eda95af6"} Nov 24 01:30:39 crc kubenswrapper[4755]: I1124 01:30:39.152799 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"994b82d0-9135-4cdb-89d8-b772fe5ba79c","Type":"ContainerStarted","Data":"b1a2f5ac02c13f08bc896109fc7163eb031849d21638d481b964e763cd9922d7"} Nov 24 01:30:39 crc kubenswrapper[4755]: I1124 01:30:39.154723 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5de632b3-5130-4cd7-95e5-dbfde5d6738a","Type":"ContainerStarted","Data":"12aacff969ea9cda5b9f3874c40796495311edc92ee29e228269094a16a797ff"} Nov 24 01:30:39 crc kubenswrapper[4755]: I1124 01:30:39.155716 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 24 01:30:39 crc kubenswrapper[4755]: I1124 01:30:39.158652 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"fc9a95c5-01c8-4034-a79e-0d1714219a17","Type":"ContainerStarted","Data":"e6b8138bdc67e42a0127fd3ae8a66bc36df0abb3ddd7845b46097c139c12808a"} Nov 24 01:30:39 crc kubenswrapper[4755]: I1124 01:30:39.172545 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=1.802021806 podStartE2EDuration="5.172526672s" podCreationTimestamp="2025-11-24 01:30:34 +0000 UTC" firstStartedPulling="2025-11-24 01:30:35.180397024 +0000 UTC m=+1059.866462515" lastFinishedPulling="2025-11-24 01:30:38.55090184 +0000 UTC m=+1063.236967381" observedRunningTime="2025-11-24 01:30:39.162695611 +0000 UTC m=+1063.848761112" watchObservedRunningTime="2025-11-24 01:30:39.172526672 +0000 UTC m=+1063.858592173" Nov 24 01:30:39 crc kubenswrapper[4755]: I1124 01:30:39.192170 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.535537562 podStartE2EDuration="6.192154363s" podCreationTimestamp="2025-11-24 01:30:33 +0000 UTC" firstStartedPulling="2025-11-24 01:30:34.897508798 +0000 UTC m=+1059.583574299" lastFinishedPulling="2025-11-24 01:30:38.554125579 +0000 UTC m=+1063.240191100" observedRunningTime="2025-11-24 01:30:39.185848829 +0000 UTC m=+1063.871914330" 
watchObservedRunningTime="2025-11-24 01:30:39.192154363 +0000 UTC m=+1063.878219864" Nov 24 01:30:39 crc kubenswrapper[4755]: I1124 01:30:39.203067 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.7333048189999998 podStartE2EDuration="6.203050834s" podCreationTimestamp="2025-11-24 01:30:33 +0000 UTC" firstStartedPulling="2025-11-24 01:30:35.083714486 +0000 UTC m=+1059.769779987" lastFinishedPulling="2025-11-24 01:30:38.553460501 +0000 UTC m=+1063.239526002" observedRunningTime="2025-11-24 01:30:39.198475408 +0000 UTC m=+1063.884540919" watchObservedRunningTime="2025-11-24 01:30:39.203050834 +0000 UTC m=+1063.889116335" Nov 24 01:30:39 crc kubenswrapper[4755]: I1124 01:30:39.209664 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Nov 24 01:30:39 crc kubenswrapper[4755]: I1124 01:30:39.566160 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Nov 24 01:30:40 crc kubenswrapper[4755]: I1124 01:30:40.172217 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"994b82d0-9135-4cdb-89d8-b772fe5ba79c","Type":"ContainerStarted","Data":"5d42c3dbd282570978a4f161ea5648e1cae0db2f8817e22230be12e8185a700b"} Nov 24 01:30:40 crc kubenswrapper[4755]: I1124 01:30:40.172509 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="994b82d0-9135-4cdb-89d8-b772fe5ba79c" containerName="nova-metadata-log" containerID="cri-o://b1a2f5ac02c13f08bc896109fc7163eb031849d21638d481b964e763cd9922d7" gracePeriod=30 Nov 24 01:30:40 crc kubenswrapper[4755]: I1124 01:30:40.172575 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="994b82d0-9135-4cdb-89d8-b772fe5ba79c" containerName="nova-metadata-metadata" containerID="cri-o://5d42c3dbd282570978a4f161ea5648e1cae0db2f8817e22230be12e8185a700b" gracePeriod=30 Nov 24 01:30:40 crc kubenswrapper[4755]: I1124 01:30:40.202739 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.462817005 podStartE2EDuration="8.202716335s" podCreationTimestamp="2025-11-24 01:30:32 +0000 UTC" firstStartedPulling="2025-11-24 01:30:33.822534398 +0000 UTC m=+1058.508599899" lastFinishedPulling="2025-11-24 01:30:38.562433718 +0000 UTC m=+1063.248499229" observedRunningTime="2025-11-24 01:30:39.221641997 +0000 UTC m=+1063.907707508" watchObservedRunningTime="2025-11-24 01:30:40.202716335 +0000 UTC m=+1064.888781846" Nov 24 01:30:40 crc kubenswrapper[4755]: I1124 01:30:40.215686 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.603794997 podStartE2EDuration="7.215671203s" podCreationTimestamp="2025-11-24 01:30:33 +0000 UTC" firstStartedPulling="2025-11-24 01:30:34.903138984 +0000 UTC m=+1059.589204485" lastFinishedPulling="2025-11-24 01:30:38.51501519 +0000 UTC m=+1063.201080691" observedRunningTime="2025-11-24 01:30:40.199853146 +0000 UTC m=+1064.885918667" watchObservedRunningTime="2025-11-24 01:30:40.215671203 +0000 UTC m=+1064.901736704" Nov 24 01:30:40 crc kubenswrapper[4755]: I1124 01:30:40.721788 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 24 01:30:40 crc kubenswrapper[4755]: I1124 01:30:40.839529 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/994b82d0-9135-4cdb-89d8-b772fe5ba79c-combined-ca-bundle\") pod \"994b82d0-9135-4cdb-89d8-b772fe5ba79c\" (UID: \"994b82d0-9135-4cdb-89d8-b772fe5ba79c\") " Nov 24 01:30:40 crc kubenswrapper[4755]: I1124 01:30:40.839626 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/994b82d0-9135-4cdb-89d8-b772fe5ba79c-config-data\") pod \"994b82d0-9135-4cdb-89d8-b772fe5ba79c\" (UID: \"994b82d0-9135-4cdb-89d8-b772fe5ba79c\") " Nov 24 01:30:40 crc kubenswrapper[4755]: I1124 01:30:40.839708 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w8sc6\" (UniqueName: \"kubernetes.io/projected/994b82d0-9135-4cdb-89d8-b772fe5ba79c-kube-api-access-w8sc6\") pod \"994b82d0-9135-4cdb-89d8-b772fe5ba79c\" (UID: \"994b82d0-9135-4cdb-89d8-b772fe5ba79c\") " Nov 24 01:30:40 crc kubenswrapper[4755]: I1124 01:30:40.839853 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/994b82d0-9135-4cdb-89d8-b772fe5ba79c-logs\") pod \"994b82d0-9135-4cdb-89d8-b772fe5ba79c\" (UID: \"994b82d0-9135-4cdb-89d8-b772fe5ba79c\") " Nov 24 01:30:40 crc kubenswrapper[4755]: I1124 01:30:40.840205 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/994b82d0-9135-4cdb-89d8-b772fe5ba79c-logs" (OuterVolumeSpecName: "logs") pod "994b82d0-9135-4cdb-89d8-b772fe5ba79c" (UID: "994b82d0-9135-4cdb-89d8-b772fe5ba79c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:30:40 crc kubenswrapper[4755]: I1124 01:30:40.841031 4755 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/994b82d0-9135-4cdb-89d8-b772fe5ba79c-logs\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:40 crc kubenswrapper[4755]: I1124 01:30:40.849974 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/994b82d0-9135-4cdb-89d8-b772fe5ba79c-kube-api-access-w8sc6" (OuterVolumeSpecName: "kube-api-access-w8sc6") pod "994b82d0-9135-4cdb-89d8-b772fe5ba79c" (UID: "994b82d0-9135-4cdb-89d8-b772fe5ba79c"). InnerVolumeSpecName "kube-api-access-w8sc6". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:30:40 crc kubenswrapper[4755]: I1124 01:30:40.872175 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/994b82d0-9135-4cdb-89d8-b772fe5ba79c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "994b82d0-9135-4cdb-89d8-b772fe5ba79c" (UID: "994b82d0-9135-4cdb-89d8-b772fe5ba79c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:30:40 crc kubenswrapper[4755]: I1124 01:30:40.880228 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/994b82d0-9135-4cdb-89d8-b772fe5ba79c-config-data" (OuterVolumeSpecName: "config-data") pod "994b82d0-9135-4cdb-89d8-b772fe5ba79c" (UID: "994b82d0-9135-4cdb-89d8-b772fe5ba79c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:30:40 crc kubenswrapper[4755]: I1124 01:30:40.942507 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/994b82d0-9135-4cdb-89d8-b772fe5ba79c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:40 crc kubenswrapper[4755]: I1124 01:30:40.942539 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/994b82d0-9135-4cdb-89d8-b772fe5ba79c-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:40 crc kubenswrapper[4755]: I1124 01:30:40.942548 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w8sc6\" (UniqueName: \"kubernetes.io/projected/994b82d0-9135-4cdb-89d8-b772fe5ba79c-kube-api-access-w8sc6\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:41 crc kubenswrapper[4755]: I1124 01:30:41.183863 4755 generic.go:334] "Generic (PLEG): container finished" podID="994b82d0-9135-4cdb-89d8-b772fe5ba79c" containerID="5d42c3dbd282570978a4f161ea5648e1cae0db2f8817e22230be12e8185a700b" exitCode=0 Nov 24 01:30:41 crc kubenswrapper[4755]: I1124 01:30:41.183898 4755 generic.go:334] "Generic (PLEG): container finished" podID="994b82d0-9135-4cdb-89d8-b772fe5ba79c" containerID="b1a2f5ac02c13f08bc896109fc7163eb031849d21638d481b964e763cd9922d7" exitCode=143 Nov 24 01:30:41 crc kubenswrapper[4755]: I1124 01:30:41.183960 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"994b82d0-9135-4cdb-89d8-b772fe5ba79c","Type":"ContainerDied","Data":"5d42c3dbd282570978a4f161ea5648e1cae0db2f8817e22230be12e8185a700b"} Nov 24 01:30:41 crc kubenswrapper[4755]: I1124 01:30:41.184025 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"994b82d0-9135-4cdb-89d8-b772fe5ba79c","Type":"ContainerDied","Data":"b1a2f5ac02c13f08bc896109fc7163eb031849d21638d481b964e763cd9922d7"} Nov 24 01:30:41 crc kubenswrapper[4755]: I1124 01:30:41.184032 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 24 01:30:41 crc kubenswrapper[4755]: I1124 01:30:41.184049 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"994b82d0-9135-4cdb-89d8-b772fe5ba79c","Type":"ContainerDied","Data":"e25334ce873c61ed6960233e4f4c1aac5314434b56ab34141f35a4393a45bac5"} Nov 24 01:30:41 crc kubenswrapper[4755]: I1124 01:30:41.184077 4755 scope.go:117] "RemoveContainer" containerID="5d42c3dbd282570978a4f161ea5648e1cae0db2f8817e22230be12e8185a700b" Nov 24 01:30:41 crc kubenswrapper[4755]: I1124 01:30:41.217199 4755 scope.go:117] "RemoveContainer" containerID="b1a2f5ac02c13f08bc896109fc7163eb031849d21638d481b964e763cd9922d7" Nov 24 01:30:41 crc kubenswrapper[4755]: I1124 01:30:41.234762 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 24 01:30:41 crc kubenswrapper[4755]: I1124 01:30:41.244991 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Nov 24 01:30:41 crc kubenswrapper[4755]: I1124 01:30:41.260579 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Nov 24 01:30:41 crc kubenswrapper[4755]: E1124 01:30:41.261364 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="994b82d0-9135-4cdb-89d8-b772fe5ba79c" containerName="nova-metadata-metadata" Nov 24 01:30:41 crc kubenswrapper[4755]: I1124 01:30:41.261383 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="994b82d0-9135-4cdb-89d8-b772fe5ba79c" containerName="nova-metadata-metadata" Nov 24 01:30:41 crc kubenswrapper[4755]: E1124 01:30:41.261393 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="994b82d0-9135-4cdb-89d8-b772fe5ba79c" containerName="nova-metadata-log" Nov 24 01:30:41 crc kubenswrapper[4755]: I1124 01:30:41.261399 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="994b82d0-9135-4cdb-89d8-b772fe5ba79c" containerName="nova-metadata-log" Nov 24 01:30:41 crc kubenswrapper[4755]: I1124 01:30:41.261701 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="994b82d0-9135-4cdb-89d8-b772fe5ba79c" containerName="nova-metadata-metadata" Nov 24 01:30:41 crc kubenswrapper[4755]: I1124 01:30:41.261724 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="994b82d0-9135-4cdb-89d8-b772fe5ba79c" containerName="nova-metadata-log" Nov 24 01:30:41 crc kubenswrapper[4755]: I1124 01:30:41.262728 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 24 01:30:41 crc kubenswrapper[4755]: I1124 01:30:41.267010 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Nov 24 01:30:41 crc kubenswrapper[4755]: I1124 01:30:41.267568 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Nov 24 01:30:41 crc kubenswrapper[4755]: I1124 01:30:41.270578 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 24 01:30:41 crc kubenswrapper[4755]: I1124 01:30:41.284006 4755 scope.go:117] "RemoveContainer" containerID="5d42c3dbd282570978a4f161ea5648e1cae0db2f8817e22230be12e8185a700b" Nov 24 01:30:41 crc kubenswrapper[4755]: E1124 01:30:41.286978 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5d42c3dbd282570978a4f161ea5648e1cae0db2f8817e22230be12e8185a700b\": container with ID starting with 5d42c3dbd282570978a4f161ea5648e1cae0db2f8817e22230be12e8185a700b not found: ID does not exist" containerID="5d42c3dbd282570978a4f161ea5648e1cae0db2f8817e22230be12e8185a700b" Nov 24 01:30:41 crc kubenswrapper[4755]: I1124 01:30:41.287018 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d42c3dbd282570978a4f161ea5648e1cae0db2f8817e22230be12e8185a700b"} err="failed to get container status \"5d42c3dbd282570978a4f161ea5648e1cae0db2f8817e22230be12e8185a700b\": rpc error: code = NotFound desc = could not find container \"5d42c3dbd282570978a4f161ea5648e1cae0db2f8817e22230be12e8185a700b\": container with ID starting with 5d42c3dbd282570978a4f161ea5648e1cae0db2f8817e22230be12e8185a700b not found: ID does not exist" Nov 24 01:30:41 crc kubenswrapper[4755]: I1124 01:30:41.287043 4755 scope.go:117] "RemoveContainer" containerID="b1a2f5ac02c13f08bc896109fc7163eb031849d21638d481b964e763cd9922d7" Nov 24 01:30:41 crc kubenswrapper[4755]: E1124 01:30:41.287402 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b1a2f5ac02c13f08bc896109fc7163eb031849d21638d481b964e763cd9922d7\": container with ID starting with b1a2f5ac02c13f08bc896109fc7163eb031849d21638d481b964e763cd9922d7 not found: ID does not exist" containerID="b1a2f5ac02c13f08bc896109fc7163eb031849d21638d481b964e763cd9922d7" Nov 24 01:30:41 crc kubenswrapper[4755]: I1124 01:30:41.287453 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1a2f5ac02c13f08bc896109fc7163eb031849d21638d481b964e763cd9922d7"} err="failed to get container status \"b1a2f5ac02c13f08bc896109fc7163eb031849d21638d481b964e763cd9922d7\": rpc error: code = NotFound desc = could not find container \"b1a2f5ac02c13f08bc896109fc7163eb031849d21638d481b964e763cd9922d7\": container with ID starting with b1a2f5ac02c13f08bc896109fc7163eb031849d21638d481b964e763cd9922d7 not found: ID does not exist" Nov 24 01:30:41 crc kubenswrapper[4755]: I1124 01:30:41.287485 4755 scope.go:117] "RemoveContainer" containerID="5d42c3dbd282570978a4f161ea5648e1cae0db2f8817e22230be12e8185a700b" Nov 24 01:30:41 crc kubenswrapper[4755]: I1124 01:30:41.288572 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d42c3dbd282570978a4f161ea5648e1cae0db2f8817e22230be12e8185a700b"} err="failed to get container status \"5d42c3dbd282570978a4f161ea5648e1cae0db2f8817e22230be12e8185a700b\": rpc error: 
code = NotFound desc = could not find container \"5d42c3dbd282570978a4f161ea5648e1cae0db2f8817e22230be12e8185a700b\": container with ID starting with 5d42c3dbd282570978a4f161ea5648e1cae0db2f8817e22230be12e8185a700b not found: ID does not exist" Nov 24 01:30:41 crc kubenswrapper[4755]: I1124 01:30:41.288674 4755 scope.go:117] "RemoveContainer" containerID="b1a2f5ac02c13f08bc896109fc7163eb031849d21638d481b964e763cd9922d7" Nov 24 01:30:41 crc kubenswrapper[4755]: I1124 01:30:41.298480 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1a2f5ac02c13f08bc896109fc7163eb031849d21638d481b964e763cd9922d7"} err="failed to get container status \"b1a2f5ac02c13f08bc896109fc7163eb031849d21638d481b964e763cd9922d7\": rpc error: code = NotFound desc = could not find container \"b1a2f5ac02c13f08bc896109fc7163eb031849d21638d481b964e763cd9922d7\": container with ID starting with b1a2f5ac02c13f08bc896109fc7163eb031849d21638d481b964e763cd9922d7 not found: ID does not exist" Nov 24 01:30:41 crc kubenswrapper[4755]: I1124 01:30:41.351409 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c3a99ebe-bf26-45a6-a43b-89bbfa385a8d-logs\") pod \"nova-metadata-0\" (UID: \"c3a99ebe-bf26-45a6-a43b-89bbfa385a8d\") " pod="openstack/nova-metadata-0" Nov 24 01:30:41 crc kubenswrapper[4755]: I1124 01:30:41.351502 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3a99ebe-bf26-45a6-a43b-89bbfa385a8d-config-data\") pod \"nova-metadata-0\" (UID: \"c3a99ebe-bf26-45a6-a43b-89bbfa385a8d\") " pod="openstack/nova-metadata-0" Nov 24 01:30:41 crc kubenswrapper[4755]: I1124 01:30:41.351552 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9mqbj\" (UniqueName: \"kubernetes.io/projected/c3a99ebe-bf26-45a6-a43b-89bbfa385a8d-kube-api-access-9mqbj\") pod \"nova-metadata-0\" (UID: \"c3a99ebe-bf26-45a6-a43b-89bbfa385a8d\") " pod="openstack/nova-metadata-0" Nov 24 01:30:41 crc kubenswrapper[4755]: I1124 01:30:41.351583 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c3a99ebe-bf26-45a6-a43b-89bbfa385a8d-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c3a99ebe-bf26-45a6-a43b-89bbfa385a8d\") " pod="openstack/nova-metadata-0" Nov 24 01:30:41 crc kubenswrapper[4755]: I1124 01:30:41.351649 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3a99ebe-bf26-45a6-a43b-89bbfa385a8d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c3a99ebe-bf26-45a6-a43b-89bbfa385a8d\") " pod="openstack/nova-metadata-0" Nov 24 01:30:41 crc kubenswrapper[4755]: I1124 01:30:41.453512 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c3a99ebe-bf26-45a6-a43b-89bbfa385a8d-logs\") pod \"nova-metadata-0\" (UID: \"c3a99ebe-bf26-45a6-a43b-89bbfa385a8d\") " pod="openstack/nova-metadata-0" Nov 24 01:30:41 crc kubenswrapper[4755]: I1124 01:30:41.453888 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c3a99ebe-bf26-45a6-a43b-89bbfa385a8d-logs\") pod \"nova-metadata-0\" (UID: 
\"c3a99ebe-bf26-45a6-a43b-89bbfa385a8d\") " pod="openstack/nova-metadata-0" Nov 24 01:30:41 crc kubenswrapper[4755]: I1124 01:30:41.453929 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3a99ebe-bf26-45a6-a43b-89bbfa385a8d-config-data\") pod \"nova-metadata-0\" (UID: \"c3a99ebe-bf26-45a6-a43b-89bbfa385a8d\") " pod="openstack/nova-metadata-0" Nov 24 01:30:41 crc kubenswrapper[4755]: I1124 01:30:41.453975 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9mqbj\" (UniqueName: \"kubernetes.io/projected/c3a99ebe-bf26-45a6-a43b-89bbfa385a8d-kube-api-access-9mqbj\") pod \"nova-metadata-0\" (UID: \"c3a99ebe-bf26-45a6-a43b-89bbfa385a8d\") " pod="openstack/nova-metadata-0" Nov 24 01:30:41 crc kubenswrapper[4755]: I1124 01:30:41.454007 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c3a99ebe-bf26-45a6-a43b-89bbfa385a8d-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c3a99ebe-bf26-45a6-a43b-89bbfa385a8d\") " pod="openstack/nova-metadata-0" Nov 24 01:30:41 crc kubenswrapper[4755]: I1124 01:30:41.454081 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3a99ebe-bf26-45a6-a43b-89bbfa385a8d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c3a99ebe-bf26-45a6-a43b-89bbfa385a8d\") " pod="openstack/nova-metadata-0" Nov 24 01:30:41 crc kubenswrapper[4755]: I1124 01:30:41.458909 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c3a99ebe-bf26-45a6-a43b-89bbfa385a8d-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c3a99ebe-bf26-45a6-a43b-89bbfa385a8d\") " pod="openstack/nova-metadata-0" Nov 24 01:30:41 crc kubenswrapper[4755]: I1124 01:30:41.458946 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3a99ebe-bf26-45a6-a43b-89bbfa385a8d-config-data\") pod \"nova-metadata-0\" (UID: \"c3a99ebe-bf26-45a6-a43b-89bbfa385a8d\") " pod="openstack/nova-metadata-0" Nov 24 01:30:41 crc kubenswrapper[4755]: I1124 01:30:41.470659 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3a99ebe-bf26-45a6-a43b-89bbfa385a8d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c3a99ebe-bf26-45a6-a43b-89bbfa385a8d\") " pod="openstack/nova-metadata-0" Nov 24 01:30:41 crc kubenswrapper[4755]: I1124 01:30:41.473039 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9mqbj\" (UniqueName: \"kubernetes.io/projected/c3a99ebe-bf26-45a6-a43b-89bbfa385a8d-kube-api-access-9mqbj\") pod \"nova-metadata-0\" (UID: \"c3a99ebe-bf26-45a6-a43b-89bbfa385a8d\") " pod="openstack/nova-metadata-0" Nov 24 01:30:41 crc kubenswrapper[4755]: I1124 01:30:41.601750 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 24 01:30:42 crc kubenswrapper[4755]: I1124 01:30:42.009161 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="994b82d0-9135-4cdb-89d8-b772fe5ba79c" path="/var/lib/kubelet/pods/994b82d0-9135-4cdb-89d8-b772fe5ba79c/volumes" Nov 24 01:30:42 crc kubenswrapper[4755]: I1124 01:30:42.074614 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 24 01:30:42 crc kubenswrapper[4755]: W1124 01:30:42.078760 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc3a99ebe_bf26_45a6_a43b_89bbfa385a8d.slice/crio-ebac763af3cb86cb30aefdd5a4d7e20425a9707dbbfb5eb13bd8c15be39a0767 WatchSource:0}: Error finding container ebac763af3cb86cb30aefdd5a4d7e20425a9707dbbfb5eb13bd8c15be39a0767: Status 404 returned error can't find the container with id ebac763af3cb86cb30aefdd5a4d7e20425a9707dbbfb5eb13bd8c15be39a0767 Nov 24 01:30:42 crc kubenswrapper[4755]: I1124 01:30:42.195073 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c3a99ebe-bf26-45a6-a43b-89bbfa385a8d","Type":"ContainerStarted","Data":"ebac763af3cb86cb30aefdd5a4d7e20425a9707dbbfb5eb13bd8c15be39a0767"} Nov 24 01:30:42 crc kubenswrapper[4755]: E1124 01:30:42.608347 4755 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6c211ce7_93af_43de_ab63_044b93a27473.slice/crio-conmon-7d17fdd1cec8829e48d38f4960e052fcc869568b66f24d506522812892416037.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6c211ce7_93af_43de_ab63_044b93a27473.slice/crio-7d17fdd1cec8829e48d38f4960e052fcc869568b66f24d506522812892416037.scope\": RecentStats: unable to find data in memory cache]" Nov 24 01:30:43 crc kubenswrapper[4755]: I1124 01:30:43.212393 4755 generic.go:334] "Generic (PLEG): container finished" podID="2efb1e5a-1455-4b1e-892e-ee86d4fdf50b" containerID="04d269e7b1d1837f19b589cec6c6436f35986ca8e4dafe52f5060dc15a29a6fe" exitCode=0 Nov 24 01:30:43 crc kubenswrapper[4755]: I1124 01:30:43.212521 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-hggks" event={"ID":"2efb1e5a-1455-4b1e-892e-ee86d4fdf50b","Type":"ContainerDied","Data":"04d269e7b1d1837f19b589cec6c6436f35986ca8e4dafe52f5060dc15a29a6fe"} Nov 24 01:30:43 crc kubenswrapper[4755]: I1124 01:30:43.216955 4755 generic.go:334] "Generic (PLEG): container finished" podID="6c211ce7-93af-43de-ab63-044b93a27473" containerID="7d17fdd1cec8829e48d38f4960e052fcc869568b66f24d506522812892416037" exitCode=0 Nov 24 01:30:43 crc kubenswrapper[4755]: I1124 01:30:43.217026 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-xrqj7" event={"ID":"6c211ce7-93af-43de-ab63-044b93a27473","Type":"ContainerDied","Data":"7d17fdd1cec8829e48d38f4960e052fcc869568b66f24d506522812892416037"} Nov 24 01:30:43 crc kubenswrapper[4755]: I1124 01:30:43.235639 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c3a99ebe-bf26-45a6-a43b-89bbfa385a8d","Type":"ContainerStarted","Data":"6b903755e2fc0d5f5f82385f6e0e782c16bc629851595511fd64c5e31d7d5d9c"} Nov 24 01:30:43 crc kubenswrapper[4755]: I1124 01:30:43.235717 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-metadata-0" event={"ID":"c3a99ebe-bf26-45a6-a43b-89bbfa385a8d","Type":"ContainerStarted","Data":"8cd85f8a05437c88d051801f097bbf0afe03ce7ed94a71bab92149e6613f624d"} Nov 24 01:30:43 crc kubenswrapper[4755]: I1124 01:30:43.290546 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.290517452 podStartE2EDuration="2.290517452s" podCreationTimestamp="2025-11-24 01:30:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:30:43.282667096 +0000 UTC m=+1067.968732597" watchObservedRunningTime="2025-11-24 01:30:43.290517452 +0000 UTC m=+1067.976582993" Nov 24 01:30:44 crc kubenswrapper[4755]: I1124 01:30:44.209752 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Nov 24 01:30:44 crc kubenswrapper[4755]: I1124 01:30:44.221638 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 24 01:30:44 crc kubenswrapper[4755]: I1124 01:30:44.222187 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 24 01:30:44 crc kubenswrapper[4755]: I1124 01:30:44.238559 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Nov 24 01:30:44 crc kubenswrapper[4755]: I1124 01:30:44.291448 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Nov 24 01:30:44 crc kubenswrapper[4755]: I1124 01:30:44.577896 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-845d6d6f59-f9x2z" Nov 24 01:30:44 crc kubenswrapper[4755]: I1124 01:30:44.679929 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-9bcwp"] Nov 24 01:30:44 crc kubenswrapper[4755]: I1124 01:30:44.680205 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5784cf869f-9bcwp" podUID="489c124a-e8f4-47a6-bc2e-e3aa9d450909" containerName="dnsmasq-dns" containerID="cri-o://872183f3c563b5cf0f1183643539050a19c18accd5eafe8643540fe77307c6a1" gracePeriod=10 Nov 24 01:30:44 crc kubenswrapper[4755]: I1124 01:30:44.892241 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-hggks" Nov 24 01:30:44 crc kubenswrapper[4755]: I1124 01:30:44.897962 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-xrqj7" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.024284 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2efb1e5a-1455-4b1e-892e-ee86d4fdf50b-scripts\") pod \"2efb1e5a-1455-4b1e-892e-ee86d4fdf50b\" (UID: \"2efb1e5a-1455-4b1e-892e-ee86d4fdf50b\") " Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.024389 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2efb1e5a-1455-4b1e-892e-ee86d4fdf50b-combined-ca-bundle\") pod \"2efb1e5a-1455-4b1e-892e-ee86d4fdf50b\" (UID: \"2efb1e5a-1455-4b1e-892e-ee86d4fdf50b\") " Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.024482 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-blkfn\" (UniqueName: \"kubernetes.io/projected/2efb1e5a-1455-4b1e-892e-ee86d4fdf50b-kube-api-access-blkfn\") pod \"2efb1e5a-1455-4b1e-892e-ee86d4fdf50b\" (UID: \"2efb1e5a-1455-4b1e-892e-ee86d4fdf50b\") " Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.024510 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c211ce7-93af-43de-ab63-044b93a27473-config-data\") pod \"6c211ce7-93af-43de-ab63-044b93a27473\" (UID: \"6c211ce7-93af-43de-ab63-044b93a27473\") " Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.024547 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c211ce7-93af-43de-ab63-044b93a27473-scripts\") pod \"6c211ce7-93af-43de-ab63-044b93a27473\" (UID: \"6c211ce7-93af-43de-ab63-044b93a27473\") " Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.024595 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2efb1e5a-1455-4b1e-892e-ee86d4fdf50b-config-data\") pod \"2efb1e5a-1455-4b1e-892e-ee86d4fdf50b\" (UID: \"2efb1e5a-1455-4b1e-892e-ee86d4fdf50b\") " Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.024702 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c211ce7-93af-43de-ab63-044b93a27473-combined-ca-bundle\") pod \"6c211ce7-93af-43de-ab63-044b93a27473\" (UID: \"6c211ce7-93af-43de-ab63-044b93a27473\") " Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.024774 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-snnwj\" (UniqueName: \"kubernetes.io/projected/6c211ce7-93af-43de-ab63-044b93a27473-kube-api-access-snnwj\") pod \"6c211ce7-93af-43de-ab63-044b93a27473\" (UID: \"6c211ce7-93af-43de-ab63-044b93a27473\") " Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.032986 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2efb1e5a-1455-4b1e-892e-ee86d4fdf50b-scripts" (OuterVolumeSpecName: "scripts") pod "2efb1e5a-1455-4b1e-892e-ee86d4fdf50b" (UID: "2efb1e5a-1455-4b1e-892e-ee86d4fdf50b"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.035485 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c211ce7-93af-43de-ab63-044b93a27473-scripts" (OuterVolumeSpecName: "scripts") pod "6c211ce7-93af-43de-ab63-044b93a27473" (UID: "6c211ce7-93af-43de-ab63-044b93a27473"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.035764 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2efb1e5a-1455-4b1e-892e-ee86d4fdf50b-kube-api-access-blkfn" (OuterVolumeSpecName: "kube-api-access-blkfn") pod "2efb1e5a-1455-4b1e-892e-ee86d4fdf50b" (UID: "2efb1e5a-1455-4b1e-892e-ee86d4fdf50b"). InnerVolumeSpecName "kube-api-access-blkfn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.038524 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c211ce7-93af-43de-ab63-044b93a27473-kube-api-access-snnwj" (OuterVolumeSpecName: "kube-api-access-snnwj") pod "6c211ce7-93af-43de-ab63-044b93a27473" (UID: "6c211ce7-93af-43de-ab63-044b93a27473"). InnerVolumeSpecName "kube-api-access-snnwj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.063833 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c211ce7-93af-43de-ab63-044b93a27473-config-data" (OuterVolumeSpecName: "config-data") pod "6c211ce7-93af-43de-ab63-044b93a27473" (UID: "6c211ce7-93af-43de-ab63-044b93a27473"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.064148 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2efb1e5a-1455-4b1e-892e-ee86d4fdf50b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2efb1e5a-1455-4b1e-892e-ee86d4fdf50b" (UID: "2efb1e5a-1455-4b1e-892e-ee86d4fdf50b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.070566 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2efb1e5a-1455-4b1e-892e-ee86d4fdf50b-config-data" (OuterVolumeSpecName: "config-data") pod "2efb1e5a-1455-4b1e-892e-ee86d4fdf50b" (UID: "2efb1e5a-1455-4b1e-892e-ee86d4fdf50b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.085265 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c211ce7-93af-43de-ab63-044b93a27473-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6c211ce7-93af-43de-ab63-044b93a27473" (UID: "6c211ce7-93af-43de-ab63-044b93a27473"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.127216 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c211ce7-93af-43de-ab63-044b93a27473-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.127252 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-snnwj\" (UniqueName: \"kubernetes.io/projected/6c211ce7-93af-43de-ab63-044b93a27473-kube-api-access-snnwj\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.127264 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2efb1e5a-1455-4b1e-892e-ee86d4fdf50b-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.127273 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2efb1e5a-1455-4b1e-892e-ee86d4fdf50b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.127282 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-blkfn\" (UniqueName: \"kubernetes.io/projected/2efb1e5a-1455-4b1e-892e-ee86d4fdf50b-kube-api-access-blkfn\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.127291 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c211ce7-93af-43de-ab63-044b93a27473-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.127299 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c211ce7-93af-43de-ab63-044b93a27473-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.127307 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2efb1e5a-1455-4b1e-892e-ee86d4fdf50b-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.146547 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-9bcwp" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.229229 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/489c124a-e8f4-47a6-bc2e-e3aa9d450909-ovsdbserver-sb\") pod \"489c124a-e8f4-47a6-bc2e-e3aa9d450909\" (UID: \"489c124a-e8f4-47a6-bc2e-e3aa9d450909\") " Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.229281 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/489c124a-e8f4-47a6-bc2e-e3aa9d450909-dns-swift-storage-0\") pod \"489c124a-e8f4-47a6-bc2e-e3aa9d450909\" (UID: \"489c124a-e8f4-47a6-bc2e-e3aa9d450909\") " Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.229360 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/489c124a-e8f4-47a6-bc2e-e3aa9d450909-config\") pod \"489c124a-e8f4-47a6-bc2e-e3aa9d450909\" (UID: \"489c124a-e8f4-47a6-bc2e-e3aa9d450909\") " Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.229402 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/489c124a-e8f4-47a6-bc2e-e3aa9d450909-ovsdbserver-nb\") pod \"489c124a-e8f4-47a6-bc2e-e3aa9d450909\" (UID: \"489c124a-e8f4-47a6-bc2e-e3aa9d450909\") " Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.229449 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/489c124a-e8f4-47a6-bc2e-e3aa9d450909-dns-svc\") pod \"489c124a-e8f4-47a6-bc2e-e3aa9d450909\" (UID: \"489c124a-e8f4-47a6-bc2e-e3aa9d450909\") " Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.229677 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4v9\" (UniqueName: \"kubernetes.io/projected/489c124a-e8f4-47a6-bc2e-e3aa9d450909-kube-api-access-4d4v9\") pod \"489c124a-e8f4-47a6-bc2e-e3aa9d450909\" (UID: \"489c124a-e8f4-47a6-bc2e-e3aa9d450909\") " Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.265910 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/489c124a-e8f4-47a6-bc2e-e3aa9d450909-kube-api-access-4d4v9" (OuterVolumeSpecName: "kube-api-access-4d4v9") pod "489c124a-e8f4-47a6-bc2e-e3aa9d450909" (UID: "489c124a-e8f4-47a6-bc2e-e3aa9d450909"). InnerVolumeSpecName "kube-api-access-4d4v9". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.291544 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-xrqj7" event={"ID":"6c211ce7-93af-43de-ab63-044b93a27473","Type":"ContainerDied","Data":"dae15ef328aa3cd4e4559636aff625c98d36b31d14413582aef3b8be70a63c8a"} Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.291595 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dae15ef328aa3cd4e4559636aff625c98d36b31d14413582aef3b8be70a63c8a" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.291731 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-xrqj7" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.302882 4755 generic.go:334] "Generic (PLEG): container finished" podID="489c124a-e8f4-47a6-bc2e-e3aa9d450909" containerID="872183f3c563b5cf0f1183643539050a19c18accd5eafe8643540fe77307c6a1" exitCode=0 Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.302979 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-9bcwp" event={"ID":"489c124a-e8f4-47a6-bc2e-e3aa9d450909","Type":"ContainerDied","Data":"872183f3c563b5cf0f1183643539050a19c18accd5eafe8643540fe77307c6a1"} Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.303006 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-9bcwp" event={"ID":"489c124a-e8f4-47a6-bc2e-e3aa9d450909","Type":"ContainerDied","Data":"9290506d75f259b1b020bca05b56ae57c9058964212836056809ba4372caff99"} Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.303021 4755 scope.go:117] "RemoveContainer" containerID="872183f3c563b5cf0f1183643539050a19c18accd5eafe8643540fe77307c6a1" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.303164 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-9bcwp" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.311889 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="f1d611ec-f141-4525-b307-89e15d639897" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.188:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.312162 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="f1d611ec-f141-4525-b307-89e15d639897" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.188:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.324463 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-hggks" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.324578 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-hggks" event={"ID":"2efb1e5a-1455-4b1e-892e-ee86d4fdf50b","Type":"ContainerDied","Data":"2f83cd0b38de998a838f24e23147c4d0a9a1627c7e82da88a8fedbc87f03c385"} Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.324630 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2f83cd0b38de998a838f24e23147c4d0a9a1627c7e82da88a8fedbc87f03c385" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.351777 4755 scope.go:117] "RemoveContainer" containerID="15e0514991e2549e463a9cb95908acb4ec29d5a3d4a3ae1c25510dfeefab7bfb" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.351813 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4v9\" (UniqueName: \"kubernetes.io/projected/489c124a-e8f4-47a6-bc2e-e3aa9d450909-kube-api-access-4d4v9\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.354496 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/489c124a-e8f4-47a6-bc2e-e3aa9d450909-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "489c124a-e8f4-47a6-bc2e-e3aa9d450909" (UID: "489c124a-e8f4-47a6-bc2e-e3aa9d450909"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.367789 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 24 01:30:45 crc kubenswrapper[4755]: E1124 01:30:45.368411 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c211ce7-93af-43de-ab63-044b93a27473" containerName="nova-manage" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.368429 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c211ce7-93af-43de-ab63-044b93a27473" containerName="nova-manage" Nov 24 01:30:45 crc kubenswrapper[4755]: E1124 01:30:45.368477 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2efb1e5a-1455-4b1e-892e-ee86d4fdf50b" containerName="nova-cell1-conductor-db-sync" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.368486 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="2efb1e5a-1455-4b1e-892e-ee86d4fdf50b" containerName="nova-cell1-conductor-db-sync" Nov 24 01:30:45 crc kubenswrapper[4755]: E1124 01:30:45.368504 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="489c124a-e8f4-47a6-bc2e-e3aa9d450909" containerName="dnsmasq-dns" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.368511 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="489c124a-e8f4-47a6-bc2e-e3aa9d450909" containerName="dnsmasq-dns" Nov 24 01:30:45 crc kubenswrapper[4755]: E1124 01:30:45.368526 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="489c124a-e8f4-47a6-bc2e-e3aa9d450909" containerName="init" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.368533 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="489c124a-e8f4-47a6-bc2e-e3aa9d450909" containerName="init" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.368837 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="2efb1e5a-1455-4b1e-892e-ee86d4fdf50b" containerName="nova-cell1-conductor-db-sync" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.368855 4755 
memory_manager.go:354] "RemoveStaleState removing state" podUID="489c124a-e8f4-47a6-bc2e-e3aa9d450909" containerName="dnsmasq-dns" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.368875 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c211ce7-93af-43de-ab63-044b93a27473" containerName="nova-manage" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.370458 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.373164 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.388064 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/489c124a-e8f4-47a6-bc2e-e3aa9d450909-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "489c124a-e8f4-47a6-bc2e-e3aa9d450909" (UID: "489c124a-e8f4-47a6-bc2e-e3aa9d450909"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.395645 4755 scope.go:117] "RemoveContainer" containerID="872183f3c563b5cf0f1183643539050a19c18accd5eafe8643540fe77307c6a1" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.396464 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 24 01:30:45 crc kubenswrapper[4755]: E1124 01:30:45.400958 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"872183f3c563b5cf0f1183643539050a19c18accd5eafe8643540fe77307c6a1\": container with ID starting with 872183f3c563b5cf0f1183643539050a19c18accd5eafe8643540fe77307c6a1 not found: ID does not exist" containerID="872183f3c563b5cf0f1183643539050a19c18accd5eafe8643540fe77307c6a1" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.401016 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"872183f3c563b5cf0f1183643539050a19c18accd5eafe8643540fe77307c6a1"} err="failed to get container status \"872183f3c563b5cf0f1183643539050a19c18accd5eafe8643540fe77307c6a1\": rpc error: code = NotFound desc = could not find container \"872183f3c563b5cf0f1183643539050a19c18accd5eafe8643540fe77307c6a1\": container with ID starting with 872183f3c563b5cf0f1183643539050a19c18accd5eafe8643540fe77307c6a1 not found: ID does not exist" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.401058 4755 scope.go:117] "RemoveContainer" containerID="15e0514991e2549e463a9cb95908acb4ec29d5a3d4a3ae1c25510dfeefab7bfb" Nov 24 01:30:45 crc kubenswrapper[4755]: E1124 01:30:45.401489 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"15e0514991e2549e463a9cb95908acb4ec29d5a3d4a3ae1c25510dfeefab7bfb\": container with ID starting with 15e0514991e2549e463a9cb95908acb4ec29d5a3d4a3ae1c25510dfeefab7bfb not found: ID does not exist" containerID="15e0514991e2549e463a9cb95908acb4ec29d5a3d4a3ae1c25510dfeefab7bfb" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.401509 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15e0514991e2549e463a9cb95908acb4ec29d5a3d4a3ae1c25510dfeefab7bfb"} err="failed to get container status \"15e0514991e2549e463a9cb95908acb4ec29d5a3d4a3ae1c25510dfeefab7bfb\": rpc error: code = NotFound desc = could not find 
container \"15e0514991e2549e463a9cb95908acb4ec29d5a3d4a3ae1c25510dfeefab7bfb\": container with ID starting with 15e0514991e2549e463a9cb95908acb4ec29d5a3d4a3ae1c25510dfeefab7bfb not found: ID does not exist" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.403678 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/489c124a-e8f4-47a6-bc2e-e3aa9d450909-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "489c124a-e8f4-47a6-bc2e-e3aa9d450909" (UID: "489c124a-e8f4-47a6-bc2e-e3aa9d450909"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.423823 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/489c124a-e8f4-47a6-bc2e-e3aa9d450909-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "489c124a-e8f4-47a6-bc2e-e3aa9d450909" (UID: "489c124a-e8f4-47a6-bc2e-e3aa9d450909"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.450328 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.450562 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="f1d611ec-f141-4525-b307-89e15d639897" containerName="nova-api-log" containerID="cri-o://12f3e3a0e9b5ec64865711c055ca82c8906da5c00ac1323959d15688eda95af6" gracePeriod=30 Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.451018 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="f1d611ec-f141-4525-b307-89e15d639897" containerName="nova-api-api" containerID="cri-o://8d9eb4fa0556bb9abf34f8b23b982a6329923bc219a0cde9db9216fb3a791a1b" gracePeriod=30 Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.456281 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f5dhq\" (UniqueName: \"kubernetes.io/projected/8974724e-99a8-4edd-8637-2767f33d3562-kube-api-access-f5dhq\") pod \"nova-cell1-conductor-0\" (UID: \"8974724e-99a8-4edd-8637-2767f33d3562\") " pod="openstack/nova-cell1-conductor-0" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.456421 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8974724e-99a8-4edd-8637-2767f33d3562-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"8974724e-99a8-4edd-8637-2767f33d3562\") " pod="openstack/nova-cell1-conductor-0" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.456439 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8974724e-99a8-4edd-8637-2767f33d3562-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"8974724e-99a8-4edd-8637-2767f33d3562\") " pod="openstack/nova-cell1-conductor-0" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.456509 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/489c124a-e8f4-47a6-bc2e-e3aa9d450909-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.456521 4755 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/489c124a-e8f4-47a6-bc2e-e3aa9d450909-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.456530 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/489c124a-e8f4-47a6-bc2e-e3aa9d450909-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.456538 4755 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/489c124a-e8f4-47a6-bc2e-e3aa9d450909-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.476910 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/489c124a-e8f4-47a6-bc2e-e3aa9d450909-config" (OuterVolumeSpecName: "config") pod "489c124a-e8f4-47a6-bc2e-e3aa9d450909" (UID: "489c124a-e8f4-47a6-bc2e-e3aa9d450909"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.479447 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.499959 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.500198 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="c3a99ebe-bf26-45a6-a43b-89bbfa385a8d" containerName="nova-metadata-log" containerID="cri-o://8cd85f8a05437c88d051801f097bbf0afe03ce7ed94a71bab92149e6613f624d" gracePeriod=30 Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.500356 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="c3a99ebe-bf26-45a6-a43b-89bbfa385a8d" containerName="nova-metadata-metadata" containerID="cri-o://6b903755e2fc0d5f5f82385f6e0e782c16bc629851595511fd64c5e31d7d5d9c" gracePeriod=30 Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.557649 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8974724e-99a8-4edd-8637-2767f33d3562-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"8974724e-99a8-4edd-8637-2767f33d3562\") " pod="openstack/nova-cell1-conductor-0" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.557706 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8974724e-99a8-4edd-8637-2767f33d3562-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"8974724e-99a8-4edd-8637-2767f33d3562\") " pod="openstack/nova-cell1-conductor-0" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.557799 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f5dhq\" (UniqueName: \"kubernetes.io/projected/8974724e-99a8-4edd-8637-2767f33d3562-kube-api-access-f5dhq\") pod \"nova-cell1-conductor-0\" (UID: \"8974724e-99a8-4edd-8637-2767f33d3562\") " pod="openstack/nova-cell1-conductor-0" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.557901 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/489c124a-e8f4-47a6-bc2e-e3aa9d450909-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.563239 4755 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8974724e-99a8-4edd-8637-2767f33d3562-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"8974724e-99a8-4edd-8637-2767f33d3562\") " pod="openstack/nova-cell1-conductor-0" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.563973 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8974724e-99a8-4edd-8637-2767f33d3562-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"8974724e-99a8-4edd-8637-2767f33d3562\") " pod="openstack/nova-cell1-conductor-0" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.578366 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f5dhq\" (UniqueName: \"kubernetes.io/projected/8974724e-99a8-4edd-8637-2767f33d3562-kube-api-access-f5dhq\") pod \"nova-cell1-conductor-0\" (UID: \"8974724e-99a8-4edd-8637-2767f33d3562\") " pod="openstack/nova-cell1-conductor-0" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.700451 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.847723 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-9bcwp"] Nov 24 01:30:45 crc kubenswrapper[4755]: I1124 01:30:45.858585 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-9bcwp"] Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.025810 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="489c124a-e8f4-47a6-bc2e-e3aa9d450909" path="/var/lib/kubelet/pods/489c124a-e8f4-47a6-bc2e-e3aa9d450909/volumes" Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.179351 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.269284 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3a99ebe-bf26-45a6-a43b-89bbfa385a8d-config-data\") pod \"c3a99ebe-bf26-45a6-a43b-89bbfa385a8d\" (UID: \"c3a99ebe-bf26-45a6-a43b-89bbfa385a8d\") " Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.269701 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c3a99ebe-bf26-45a6-a43b-89bbfa385a8d-logs\") pod \"c3a99ebe-bf26-45a6-a43b-89bbfa385a8d\" (UID: \"c3a99ebe-bf26-45a6-a43b-89bbfa385a8d\") " Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.269771 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c3a99ebe-bf26-45a6-a43b-89bbfa385a8d-nova-metadata-tls-certs\") pod \"c3a99ebe-bf26-45a6-a43b-89bbfa385a8d\" (UID: \"c3a99ebe-bf26-45a6-a43b-89bbfa385a8d\") " Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.269881 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9mqbj\" (UniqueName: \"kubernetes.io/projected/c3a99ebe-bf26-45a6-a43b-89bbfa385a8d-kube-api-access-9mqbj\") pod \"c3a99ebe-bf26-45a6-a43b-89bbfa385a8d\" (UID: \"c3a99ebe-bf26-45a6-a43b-89bbfa385a8d\") " Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.269915 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3a99ebe-bf26-45a6-a43b-89bbfa385a8d-combined-ca-bundle\") pod \"c3a99ebe-bf26-45a6-a43b-89bbfa385a8d\" (UID: \"c3a99ebe-bf26-45a6-a43b-89bbfa385a8d\") " Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.270447 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c3a99ebe-bf26-45a6-a43b-89bbfa385a8d-logs" (OuterVolumeSpecName: "logs") pod "c3a99ebe-bf26-45a6-a43b-89bbfa385a8d" (UID: "c3a99ebe-bf26-45a6-a43b-89bbfa385a8d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.275085 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c3a99ebe-bf26-45a6-a43b-89bbfa385a8d-kube-api-access-9mqbj" (OuterVolumeSpecName: "kube-api-access-9mqbj") pod "c3a99ebe-bf26-45a6-a43b-89bbfa385a8d" (UID: "c3a99ebe-bf26-45a6-a43b-89bbfa385a8d"). InnerVolumeSpecName "kube-api-access-9mqbj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.301885 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.304688 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3a99ebe-bf26-45a6-a43b-89bbfa385a8d-config-data" (OuterVolumeSpecName: "config-data") pod "c3a99ebe-bf26-45a6-a43b-89bbfa385a8d" (UID: "c3a99ebe-bf26-45a6-a43b-89bbfa385a8d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:30:46 crc kubenswrapper[4755]: W1124 01:30:46.305498 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8974724e_99a8_4edd_8637_2767f33d3562.slice/crio-0f23cab1f4460424986b3cca742d725403f8187690c9bc0b6a50d0eb2fc2420e WatchSource:0}: Error finding container 0f23cab1f4460424986b3cca742d725403f8187690c9bc0b6a50d0eb2fc2420e: Status 404 returned error can't find the container with id 0f23cab1f4460424986b3cca742d725403f8187690c9bc0b6a50d0eb2fc2420e Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.310449 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3a99ebe-bf26-45a6-a43b-89bbfa385a8d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c3a99ebe-bf26-45a6-a43b-89bbfa385a8d" (UID: "c3a99ebe-bf26-45a6-a43b-89bbfa385a8d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.332201 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3a99ebe-bf26-45a6-a43b-89bbfa385a8d-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "c3a99ebe-bf26-45a6-a43b-89bbfa385a8d" (UID: "c3a99ebe-bf26-45a6-a43b-89bbfa385a8d"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.334271 4755 generic.go:334] "Generic (PLEG): container finished" podID="f1d611ec-f141-4525-b307-89e15d639897" containerID="12f3e3a0e9b5ec64865711c055ca82c8906da5c00ac1323959d15688eda95af6" exitCode=143 Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.334324 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f1d611ec-f141-4525-b307-89e15d639897","Type":"ContainerDied","Data":"12f3e3a0e9b5ec64865711c055ca82c8906da5c00ac1323959d15688eda95af6"} Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.335740 4755 generic.go:334] "Generic (PLEG): container finished" podID="c3a99ebe-bf26-45a6-a43b-89bbfa385a8d" containerID="6b903755e2fc0d5f5f82385f6e0e782c16bc629851595511fd64c5e31d7d5d9c" exitCode=0 Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.335757 4755 generic.go:334] "Generic (PLEG): container finished" podID="c3a99ebe-bf26-45a6-a43b-89bbfa385a8d" containerID="8cd85f8a05437c88d051801f097bbf0afe03ce7ed94a71bab92149e6613f624d" exitCode=143 Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.335785 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c3a99ebe-bf26-45a6-a43b-89bbfa385a8d","Type":"ContainerDied","Data":"6b903755e2fc0d5f5f82385f6e0e782c16bc629851595511fd64c5e31d7d5d9c"} Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.335800 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c3a99ebe-bf26-45a6-a43b-89bbfa385a8d","Type":"ContainerDied","Data":"8cd85f8a05437c88d051801f097bbf0afe03ce7ed94a71bab92149e6613f624d"} Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.335809 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c3a99ebe-bf26-45a6-a43b-89bbfa385a8d","Type":"ContainerDied","Data":"ebac763af3cb86cb30aefdd5a4d7e20425a9707dbbfb5eb13bd8c15be39a0767"} Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.335824 4755 scope.go:117] 
"RemoveContainer" containerID="6b903755e2fc0d5f5f82385f6e0e782c16bc629851595511fd64c5e31d7d5d9c" Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.335927 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.350062 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="fc9a95c5-01c8-4034-a79e-0d1714219a17" containerName="nova-scheduler-scheduler" containerID="cri-o://e6b8138bdc67e42a0127fd3ae8a66bc36df0abb3ddd7845b46097c139c12808a" gracePeriod=30 Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.350312 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"8974724e-99a8-4edd-8637-2767f33d3562","Type":"ContainerStarted","Data":"0f23cab1f4460424986b3cca742d725403f8187690c9bc0b6a50d0eb2fc2420e"} Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.378353 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3a99ebe-bf26-45a6-a43b-89bbfa385a8d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.378395 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3a99ebe-bf26-45a6-a43b-89bbfa385a8d-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.378406 4755 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c3a99ebe-bf26-45a6-a43b-89bbfa385a8d-logs\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.378420 4755 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c3a99ebe-bf26-45a6-a43b-89bbfa385a8d-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.378434 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9mqbj\" (UniqueName: \"kubernetes.io/projected/c3a99ebe-bf26-45a6-a43b-89bbfa385a8d-kube-api-access-9mqbj\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.389142 4755 scope.go:117] "RemoveContainer" containerID="8cd85f8a05437c88d051801f097bbf0afe03ce7ed94a71bab92149e6613f624d" Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.395650 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.403345 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.415523 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Nov 24 01:30:46 crc kubenswrapper[4755]: E1124 01:30:46.416251 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3a99ebe-bf26-45a6-a43b-89bbfa385a8d" containerName="nova-metadata-log" Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.416353 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3a99ebe-bf26-45a6-a43b-89bbfa385a8d" containerName="nova-metadata-log" Nov 24 01:30:46 crc kubenswrapper[4755]: E1124 01:30:46.416470 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3a99ebe-bf26-45a6-a43b-89bbfa385a8d" containerName="nova-metadata-metadata" Nov 24 01:30:46 crc 
kubenswrapper[4755]: I1124 01:30:46.416560 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3a99ebe-bf26-45a6-a43b-89bbfa385a8d" containerName="nova-metadata-metadata" Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.416884 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="c3a99ebe-bf26-45a6-a43b-89bbfa385a8d" containerName="nova-metadata-metadata" Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.416980 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="c3a99ebe-bf26-45a6-a43b-89bbfa385a8d" containerName="nova-metadata-log" Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.418865 4755 scope.go:117] "RemoveContainer" containerID="6b903755e2fc0d5f5f82385f6e0e782c16bc629851595511fd64c5e31d7d5d9c" Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.419660 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 24 01:30:46 crc kubenswrapper[4755]: E1124 01:30:46.419943 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b903755e2fc0d5f5f82385f6e0e782c16bc629851595511fd64c5e31d7d5d9c\": container with ID starting with 6b903755e2fc0d5f5f82385f6e0e782c16bc629851595511fd64c5e31d7d5d9c not found: ID does not exist" containerID="6b903755e2fc0d5f5f82385f6e0e782c16bc629851595511fd64c5e31d7d5d9c" Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.419977 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b903755e2fc0d5f5f82385f6e0e782c16bc629851595511fd64c5e31d7d5d9c"} err="failed to get container status \"6b903755e2fc0d5f5f82385f6e0e782c16bc629851595511fd64c5e31d7d5d9c\": rpc error: code = NotFound desc = could not find container \"6b903755e2fc0d5f5f82385f6e0e782c16bc629851595511fd64c5e31d7d5d9c\": container with ID starting with 6b903755e2fc0d5f5f82385f6e0e782c16bc629851595511fd64c5e31d7d5d9c not found: ID does not exist" Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.420003 4755 scope.go:117] "RemoveContainer" containerID="8cd85f8a05437c88d051801f097bbf0afe03ce7ed94a71bab92149e6613f624d" Nov 24 01:30:46 crc kubenswrapper[4755]: E1124 01:30:46.420268 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8cd85f8a05437c88d051801f097bbf0afe03ce7ed94a71bab92149e6613f624d\": container with ID starting with 8cd85f8a05437c88d051801f097bbf0afe03ce7ed94a71bab92149e6613f624d not found: ID does not exist" containerID="8cd85f8a05437c88d051801f097bbf0afe03ce7ed94a71bab92149e6613f624d" Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.420296 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8cd85f8a05437c88d051801f097bbf0afe03ce7ed94a71bab92149e6613f624d"} err="failed to get container status \"8cd85f8a05437c88d051801f097bbf0afe03ce7ed94a71bab92149e6613f624d\": rpc error: code = NotFound desc = could not find container \"8cd85f8a05437c88d051801f097bbf0afe03ce7ed94a71bab92149e6613f624d\": container with ID starting with 8cd85f8a05437c88d051801f097bbf0afe03ce7ed94a71bab92149e6613f624d not found: ID does not exist" Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.420349 4755 scope.go:117] "RemoveContainer" containerID="6b903755e2fc0d5f5f82385f6e0e782c16bc629851595511fd64c5e31d7d5d9c" Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.420566 4755 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"6b903755e2fc0d5f5f82385f6e0e782c16bc629851595511fd64c5e31d7d5d9c"} err="failed to get container status \"6b903755e2fc0d5f5f82385f6e0e782c16bc629851595511fd64c5e31d7d5d9c\": rpc error: code = NotFound desc = could not find container \"6b903755e2fc0d5f5f82385f6e0e782c16bc629851595511fd64c5e31d7d5d9c\": container with ID starting with 6b903755e2fc0d5f5f82385f6e0e782c16bc629851595511fd64c5e31d7d5d9c not found: ID does not exist" Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.420588 4755 scope.go:117] "RemoveContainer" containerID="8cd85f8a05437c88d051801f097bbf0afe03ce7ed94a71bab92149e6613f624d" Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.421126 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8cd85f8a05437c88d051801f097bbf0afe03ce7ed94a71bab92149e6613f624d"} err="failed to get container status \"8cd85f8a05437c88d051801f097bbf0afe03ce7ed94a71bab92149e6613f624d\": rpc error: code = NotFound desc = could not find container \"8cd85f8a05437c88d051801f097bbf0afe03ce7ed94a71bab92149e6613f624d\": container with ID starting with 8cd85f8a05437c88d051801f097bbf0afe03ce7ed94a71bab92149e6613f624d not found: ID does not exist" Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.425612 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.439477 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.439865 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.479452 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a8d01eee-3c39-4dee-be99-74c8528f4516-logs\") pod \"nova-metadata-0\" (UID: \"a8d01eee-3c39-4dee-be99-74c8528f4516\") " pod="openstack/nova-metadata-0" Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.479653 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8d01eee-3c39-4dee-be99-74c8528f4516-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"a8d01eee-3c39-4dee-be99-74c8528f4516\") " pod="openstack/nova-metadata-0" Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.479692 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8d01eee-3c39-4dee-be99-74c8528f4516-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"a8d01eee-3c39-4dee-be99-74c8528f4516\") " pod="openstack/nova-metadata-0" Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.479929 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zrwth\" (UniqueName: \"kubernetes.io/projected/a8d01eee-3c39-4dee-be99-74c8528f4516-kube-api-access-zrwth\") pod \"nova-metadata-0\" (UID: \"a8d01eee-3c39-4dee-be99-74c8528f4516\") " pod="openstack/nova-metadata-0" Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.479996 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8d01eee-3c39-4dee-be99-74c8528f4516-config-data\") pod 
\"nova-metadata-0\" (UID: \"a8d01eee-3c39-4dee-be99-74c8528f4516\") " pod="openstack/nova-metadata-0" Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.581656 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a8d01eee-3c39-4dee-be99-74c8528f4516-logs\") pod \"nova-metadata-0\" (UID: \"a8d01eee-3c39-4dee-be99-74c8528f4516\") " pod="openstack/nova-metadata-0" Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.581773 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8d01eee-3c39-4dee-be99-74c8528f4516-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"a8d01eee-3c39-4dee-be99-74c8528f4516\") " pod="openstack/nova-metadata-0" Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.581791 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8d01eee-3c39-4dee-be99-74c8528f4516-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"a8d01eee-3c39-4dee-be99-74c8528f4516\") " pod="openstack/nova-metadata-0" Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.581859 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zrwth\" (UniqueName: \"kubernetes.io/projected/a8d01eee-3c39-4dee-be99-74c8528f4516-kube-api-access-zrwth\") pod \"nova-metadata-0\" (UID: \"a8d01eee-3c39-4dee-be99-74c8528f4516\") " pod="openstack/nova-metadata-0" Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.581880 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8d01eee-3c39-4dee-be99-74c8528f4516-config-data\") pod \"nova-metadata-0\" (UID: \"a8d01eee-3c39-4dee-be99-74c8528f4516\") " pod="openstack/nova-metadata-0" Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.582713 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a8d01eee-3c39-4dee-be99-74c8528f4516-logs\") pod \"nova-metadata-0\" (UID: \"a8d01eee-3c39-4dee-be99-74c8528f4516\") " pod="openstack/nova-metadata-0" Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.586426 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8d01eee-3c39-4dee-be99-74c8528f4516-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"a8d01eee-3c39-4dee-be99-74c8528f4516\") " pod="openstack/nova-metadata-0" Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.587115 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8d01eee-3c39-4dee-be99-74c8528f4516-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"a8d01eee-3c39-4dee-be99-74c8528f4516\") " pod="openstack/nova-metadata-0" Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.587213 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8d01eee-3c39-4dee-be99-74c8528f4516-config-data\") pod \"nova-metadata-0\" (UID: \"a8d01eee-3c39-4dee-be99-74c8528f4516\") " pod="openstack/nova-metadata-0" Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.604677 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zrwth\" (UniqueName: 
\"kubernetes.io/projected/a8d01eee-3c39-4dee-be99-74c8528f4516-kube-api-access-zrwth\") pod \"nova-metadata-0\" (UID: \"a8d01eee-3c39-4dee-be99-74c8528f4516\") " pod="openstack/nova-metadata-0" Nov 24 01:30:46 crc kubenswrapper[4755]: I1124 01:30:46.757374 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 24 01:30:47 crc kubenswrapper[4755]: I1124 01:30:47.262113 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 24 01:30:47 crc kubenswrapper[4755]: I1124 01:30:47.407661 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a8d01eee-3c39-4dee-be99-74c8528f4516","Type":"ContainerStarted","Data":"718d4160897d72e5744179098872db63a9bd00736dd64352439e840f7829a20b"} Nov 24 01:30:47 crc kubenswrapper[4755]: I1124 01:30:47.426977 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"8974724e-99a8-4edd-8637-2767f33d3562","Type":"ContainerStarted","Data":"fd860e75fcacedced6d548a055594f5d60507afb4f1bbea7d637bbb20ce855ea"} Nov 24 01:30:47 crc kubenswrapper[4755]: I1124 01:30:47.427376 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Nov 24 01:30:47 crc kubenswrapper[4755]: I1124 01:30:47.446989 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.446968953 podStartE2EDuration="2.446968953s" podCreationTimestamp="2025-11-24 01:30:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:30:47.443081436 +0000 UTC m=+1072.129146937" watchObservedRunningTime="2025-11-24 01:30:47.446968953 +0000 UTC m=+1072.133034454" Nov 24 01:30:48 crc kubenswrapper[4755]: I1124 01:30:48.017290 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c3a99ebe-bf26-45a6-a43b-89bbfa385a8d" path="/var/lib/kubelet/pods/c3a99ebe-bf26-45a6-a43b-89bbfa385a8d/volumes" Nov 24 01:30:48 crc kubenswrapper[4755]: I1124 01:30:48.436716 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a8d01eee-3c39-4dee-be99-74c8528f4516","Type":"ContainerStarted","Data":"10c91a0c3d76b137832e77acb32503639ecc457a38b61219ffa9cda63ed9f3b7"} Nov 24 01:30:48 crc kubenswrapper[4755]: I1124 01:30:48.436756 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a8d01eee-3c39-4dee-be99-74c8528f4516","Type":"ContainerStarted","Data":"dc6e0a2be2bede39b861a741aa1ea1e5e07d6ad4731c34a26932c5633959a484"} Nov 24 01:30:48 crc kubenswrapper[4755]: I1124 01:30:48.458870 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.458846082 podStartE2EDuration="2.458846082s" podCreationTimestamp="2025-11-24 01:30:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:30:48.455260873 +0000 UTC m=+1073.141326374" watchObservedRunningTime="2025-11-24 01:30:48.458846082 +0000 UTC m=+1073.144911593" Nov 24 01:30:49 crc kubenswrapper[4755]: E1124 01:30:49.211839 4755 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" 
containerID="e6b8138bdc67e42a0127fd3ae8a66bc36df0abb3ddd7845b46097c139c12808a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 24 01:30:49 crc kubenswrapper[4755]: E1124 01:30:49.213448 4755 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e6b8138bdc67e42a0127fd3ae8a66bc36df0abb3ddd7845b46097c139c12808a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 24 01:30:49 crc kubenswrapper[4755]: E1124 01:30:49.214930 4755 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e6b8138bdc67e42a0127fd3ae8a66bc36df0abb3ddd7845b46097c139c12808a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 24 01:30:49 crc kubenswrapper[4755]: E1124 01:30:49.215022 4755 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="fc9a95c5-01c8-4034-a79e-0d1714219a17" containerName="nova-scheduler-scheduler" Nov 24 01:30:50 crc kubenswrapper[4755]: I1124 01:30:50.472283 4755 generic.go:334] "Generic (PLEG): container finished" podID="fc9a95c5-01c8-4034-a79e-0d1714219a17" containerID="e6b8138bdc67e42a0127fd3ae8a66bc36df0abb3ddd7845b46097c139c12808a" exitCode=0 Nov 24 01:30:50 crc kubenswrapper[4755]: I1124 01:30:50.472361 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"fc9a95c5-01c8-4034-a79e-0d1714219a17","Type":"ContainerDied","Data":"e6b8138bdc67e42a0127fd3ae8a66bc36df0abb3ddd7845b46097c139c12808a"} Nov 24 01:30:50 crc kubenswrapper[4755]: I1124 01:30:50.640300 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 24 01:30:50 crc kubenswrapper[4755]: I1124 01:30:50.663019 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc9a95c5-01c8-4034-a79e-0d1714219a17-combined-ca-bundle\") pod \"fc9a95c5-01c8-4034-a79e-0d1714219a17\" (UID: \"fc9a95c5-01c8-4034-a79e-0d1714219a17\") " Nov 24 01:30:50 crc kubenswrapper[4755]: I1124 01:30:50.663271 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dwsnt\" (UniqueName: \"kubernetes.io/projected/fc9a95c5-01c8-4034-a79e-0d1714219a17-kube-api-access-dwsnt\") pod \"fc9a95c5-01c8-4034-a79e-0d1714219a17\" (UID: \"fc9a95c5-01c8-4034-a79e-0d1714219a17\") " Nov 24 01:30:50 crc kubenswrapper[4755]: I1124 01:30:50.663325 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc9a95c5-01c8-4034-a79e-0d1714219a17-config-data\") pod \"fc9a95c5-01c8-4034-a79e-0d1714219a17\" (UID: \"fc9a95c5-01c8-4034-a79e-0d1714219a17\") " Nov 24 01:30:50 crc kubenswrapper[4755]: I1124 01:30:50.685015 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc9a95c5-01c8-4034-a79e-0d1714219a17-kube-api-access-dwsnt" (OuterVolumeSpecName: "kube-api-access-dwsnt") pod "fc9a95c5-01c8-4034-a79e-0d1714219a17" (UID: "fc9a95c5-01c8-4034-a79e-0d1714219a17"). InnerVolumeSpecName "kube-api-access-dwsnt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:30:50 crc kubenswrapper[4755]: I1124 01:30:50.691322 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc9a95c5-01c8-4034-a79e-0d1714219a17-config-data" (OuterVolumeSpecName: "config-data") pod "fc9a95c5-01c8-4034-a79e-0d1714219a17" (UID: "fc9a95c5-01c8-4034-a79e-0d1714219a17"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:30:50 crc kubenswrapper[4755]: I1124 01:30:50.702088 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc9a95c5-01c8-4034-a79e-0d1714219a17-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fc9a95c5-01c8-4034-a79e-0d1714219a17" (UID: "fc9a95c5-01c8-4034-a79e-0d1714219a17"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:30:50 crc kubenswrapper[4755]: I1124 01:30:50.765419 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dwsnt\" (UniqueName: \"kubernetes.io/projected/fc9a95c5-01c8-4034-a79e-0d1714219a17-kube-api-access-dwsnt\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:50 crc kubenswrapper[4755]: I1124 01:30:50.765677 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc9a95c5-01c8-4034-a79e-0d1714219a17-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:50 crc kubenswrapper[4755]: I1124 01:30:50.765800 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc9a95c5-01c8-4034-a79e-0d1714219a17-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.167770 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.275655 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1d611ec-f141-4525-b307-89e15d639897-config-data\") pod \"f1d611ec-f141-4525-b307-89e15d639897\" (UID: \"f1d611ec-f141-4525-b307-89e15d639897\") " Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.275742 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f1d611ec-f141-4525-b307-89e15d639897-logs\") pod \"f1d611ec-f141-4525-b307-89e15d639897\" (UID: \"f1d611ec-f141-4525-b307-89e15d639897\") " Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.275848 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ct9vr\" (UniqueName: \"kubernetes.io/projected/f1d611ec-f141-4525-b307-89e15d639897-kube-api-access-ct9vr\") pod \"f1d611ec-f141-4525-b307-89e15d639897\" (UID: \"f1d611ec-f141-4525-b307-89e15d639897\") " Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.275879 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1d611ec-f141-4525-b307-89e15d639897-combined-ca-bundle\") pod \"f1d611ec-f141-4525-b307-89e15d639897\" (UID: \"f1d611ec-f141-4525-b307-89e15d639897\") " Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.276381 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f1d611ec-f141-4525-b307-89e15d639897-logs" (OuterVolumeSpecName: "logs") pod "f1d611ec-f141-4525-b307-89e15d639897" (UID: "f1d611ec-f141-4525-b307-89e15d639897"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.276810 4755 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f1d611ec-f141-4525-b307-89e15d639897-logs\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.278704 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1d611ec-f141-4525-b307-89e15d639897-kube-api-access-ct9vr" (OuterVolumeSpecName: "kube-api-access-ct9vr") pod "f1d611ec-f141-4525-b307-89e15d639897" (UID: "f1d611ec-f141-4525-b307-89e15d639897"). InnerVolumeSpecName "kube-api-access-ct9vr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.302199 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1d611ec-f141-4525-b307-89e15d639897-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f1d611ec-f141-4525-b307-89e15d639897" (UID: "f1d611ec-f141-4525-b307-89e15d639897"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.302639 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1d611ec-f141-4525-b307-89e15d639897-config-data" (OuterVolumeSpecName: "config-data") pod "f1d611ec-f141-4525-b307-89e15d639897" (UID: "f1d611ec-f141-4525-b307-89e15d639897"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.378753 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ct9vr\" (UniqueName: \"kubernetes.io/projected/f1d611ec-f141-4525-b307-89e15d639897-kube-api-access-ct9vr\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.378818 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1d611ec-f141-4525-b307-89e15d639897-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.378831 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1d611ec-f141-4525-b307-89e15d639897-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.482830 4755 generic.go:334] "Generic (PLEG): container finished" podID="f1d611ec-f141-4525-b307-89e15d639897" containerID="8d9eb4fa0556bb9abf34f8b23b982a6329923bc219a0cde9db9216fb3a791a1b" exitCode=0 Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.482911 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f1d611ec-f141-4525-b307-89e15d639897","Type":"ContainerDied","Data":"8d9eb4fa0556bb9abf34f8b23b982a6329923bc219a0cde9db9216fb3a791a1b"} Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.482944 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f1d611ec-f141-4525-b307-89e15d639897","Type":"ContainerDied","Data":"fdab63ae680e4f9e3094ab7ba8e02a1d7d29a336207a45fe5b35fba45a09b72a"} Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.482966 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.483002 4755 scope.go:117] "RemoveContainer" containerID="8d9eb4fa0556bb9abf34f8b23b982a6329923bc219a0cde9db9216fb3a791a1b" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.485724 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"fc9a95c5-01c8-4034-a79e-0d1714219a17","Type":"ContainerDied","Data":"75f35531917fdda4a8d0efd6e08bdee10f189c74fd9fac032cc2628275ff72de"} Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.485808 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.522398 4755 scope.go:117] "RemoveContainer" containerID="12f3e3a0e9b5ec64865711c055ca82c8906da5c00ac1323959d15688eda95af6" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.523256 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.572463 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.584646 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.585914 4755 scope.go:117] "RemoveContainer" containerID="8d9eb4fa0556bb9abf34f8b23b982a6329923bc219a0cde9db9216fb3a791a1b" Nov 24 01:30:51 crc kubenswrapper[4755]: E1124 01:30:51.587117 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d9eb4fa0556bb9abf34f8b23b982a6329923bc219a0cde9db9216fb3a791a1b\": container with ID starting with 8d9eb4fa0556bb9abf34f8b23b982a6329923bc219a0cde9db9216fb3a791a1b not found: ID does not exist" containerID="8d9eb4fa0556bb9abf34f8b23b982a6329923bc219a0cde9db9216fb3a791a1b" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.587162 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d9eb4fa0556bb9abf34f8b23b982a6329923bc219a0cde9db9216fb3a791a1b"} err="failed to get container status \"8d9eb4fa0556bb9abf34f8b23b982a6329923bc219a0cde9db9216fb3a791a1b\": rpc error: code = NotFound desc = could not find container \"8d9eb4fa0556bb9abf34f8b23b982a6329923bc219a0cde9db9216fb3a791a1b\": container with ID starting with 8d9eb4fa0556bb9abf34f8b23b982a6329923bc219a0cde9db9216fb3a791a1b not found: ID does not exist" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.587186 4755 scope.go:117] "RemoveContainer" containerID="12f3e3a0e9b5ec64865711c055ca82c8906da5c00ac1323959d15688eda95af6" Nov 24 01:30:51 crc kubenswrapper[4755]: E1124 01:30:51.587585 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"12f3e3a0e9b5ec64865711c055ca82c8906da5c00ac1323959d15688eda95af6\": container with ID starting with 12f3e3a0e9b5ec64865711c055ca82c8906da5c00ac1323959d15688eda95af6 not found: ID does not exist" containerID="12f3e3a0e9b5ec64865711c055ca82c8906da5c00ac1323959d15688eda95af6" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.587627 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"12f3e3a0e9b5ec64865711c055ca82c8906da5c00ac1323959d15688eda95af6"} err="failed to get container status \"12f3e3a0e9b5ec64865711c055ca82c8906da5c00ac1323959d15688eda95af6\": rpc error: code = NotFound desc = could not find container \"12f3e3a0e9b5ec64865711c055ca82c8906da5c00ac1323959d15688eda95af6\": container with ID starting with 12f3e3a0e9b5ec64865711c055ca82c8906da5c00ac1323959d15688eda95af6 not found: ID does not exist" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.587642 4755 scope.go:117] "RemoveContainer" containerID="e6b8138bdc67e42a0127fd3ae8a66bc36df0abb3ddd7845b46097c139c12808a" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.603361 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.612437 4755 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Nov 24 01:30:51 crc kubenswrapper[4755]: E1124 01:30:51.612888 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1d611ec-f141-4525-b307-89e15d639897" containerName="nova-api-api" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.612906 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1d611ec-f141-4525-b307-89e15d639897" containerName="nova-api-api" Nov 24 01:30:51 crc kubenswrapper[4755]: E1124 01:30:51.612932 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1d611ec-f141-4525-b307-89e15d639897" containerName="nova-api-log" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.612938 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1d611ec-f141-4525-b307-89e15d639897" containerName="nova-api-log" Nov 24 01:30:51 crc kubenswrapper[4755]: E1124 01:30:51.612955 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc9a95c5-01c8-4034-a79e-0d1714219a17" containerName="nova-scheduler-scheduler" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.612961 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc9a95c5-01c8-4034-a79e-0d1714219a17" containerName="nova-scheduler-scheduler" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.613133 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1d611ec-f141-4525-b307-89e15d639897" containerName="nova-api-log" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.613145 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc9a95c5-01c8-4034-a79e-0d1714219a17" containerName="nova-scheduler-scheduler" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.613158 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1d611ec-f141-4525-b307-89e15d639897" containerName="nova-api-api" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.613866 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.615682 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.635569 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.642676 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.644258 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.646739 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.652556 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.758431 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.758516 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.786754 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4lqcl\" (UniqueName: \"kubernetes.io/projected/242276fc-0e87-4b68-b540-a0ea131df85a-kube-api-access-4lqcl\") pod \"nova-scheduler-0\" (UID: \"242276fc-0e87-4b68-b540-a0ea131df85a\") " pod="openstack/nova-scheduler-0" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.786808 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/242276fc-0e87-4b68-b540-a0ea131df85a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"242276fc-0e87-4b68-b540-a0ea131df85a\") " pod="openstack/nova-scheduler-0" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.786829 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d35e97cf-abdb-42f5-96b9-24f7486ff607-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d35e97cf-abdb-42f5-96b9-24f7486ff607\") " pod="openstack/nova-api-0" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.786862 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d35e97cf-abdb-42f5-96b9-24f7486ff607-logs\") pod \"nova-api-0\" (UID: \"d35e97cf-abdb-42f5-96b9-24f7486ff607\") " pod="openstack/nova-api-0" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.786877 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/242276fc-0e87-4b68-b540-a0ea131df85a-config-data\") pod \"nova-scheduler-0\" (UID: \"242276fc-0e87-4b68-b540-a0ea131df85a\") " pod="openstack/nova-scheduler-0" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.787035 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d35e97cf-abdb-42f5-96b9-24f7486ff607-config-data\") pod \"nova-api-0\" (UID: \"d35e97cf-abdb-42f5-96b9-24f7486ff607\") " pod="openstack/nova-api-0" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.787058 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-72xx6\" (UniqueName: \"kubernetes.io/projected/d35e97cf-abdb-42f5-96b9-24f7486ff607-kube-api-access-72xx6\") pod \"nova-api-0\" (UID: \"d35e97cf-abdb-42f5-96b9-24f7486ff607\") " pod="openstack/nova-api-0" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.889361 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4lqcl\" (UniqueName: 
\"kubernetes.io/projected/242276fc-0e87-4b68-b540-a0ea131df85a-kube-api-access-4lqcl\") pod \"nova-scheduler-0\" (UID: \"242276fc-0e87-4b68-b540-a0ea131df85a\") " pod="openstack/nova-scheduler-0" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.889435 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/242276fc-0e87-4b68-b540-a0ea131df85a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"242276fc-0e87-4b68-b540-a0ea131df85a\") " pod="openstack/nova-scheduler-0" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.889465 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d35e97cf-abdb-42f5-96b9-24f7486ff607-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d35e97cf-abdb-42f5-96b9-24f7486ff607\") " pod="openstack/nova-api-0" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.889531 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d35e97cf-abdb-42f5-96b9-24f7486ff607-logs\") pod \"nova-api-0\" (UID: \"d35e97cf-abdb-42f5-96b9-24f7486ff607\") " pod="openstack/nova-api-0" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.889558 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/242276fc-0e87-4b68-b540-a0ea131df85a-config-data\") pod \"nova-scheduler-0\" (UID: \"242276fc-0e87-4b68-b540-a0ea131df85a\") " pod="openstack/nova-scheduler-0" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.889653 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d35e97cf-abdb-42f5-96b9-24f7486ff607-config-data\") pod \"nova-api-0\" (UID: \"d35e97cf-abdb-42f5-96b9-24f7486ff607\") " pod="openstack/nova-api-0" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.889682 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-72xx6\" (UniqueName: \"kubernetes.io/projected/d35e97cf-abdb-42f5-96b9-24f7486ff607-kube-api-access-72xx6\") pod \"nova-api-0\" (UID: \"d35e97cf-abdb-42f5-96b9-24f7486ff607\") " pod="openstack/nova-api-0" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.890461 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d35e97cf-abdb-42f5-96b9-24f7486ff607-logs\") pod \"nova-api-0\" (UID: \"d35e97cf-abdb-42f5-96b9-24f7486ff607\") " pod="openstack/nova-api-0" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.894222 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/242276fc-0e87-4b68-b540-a0ea131df85a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"242276fc-0e87-4b68-b540-a0ea131df85a\") " pod="openstack/nova-scheduler-0" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.895833 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d35e97cf-abdb-42f5-96b9-24f7486ff607-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d35e97cf-abdb-42f5-96b9-24f7486ff607\") " pod="openstack/nova-api-0" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.896506 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/d35e97cf-abdb-42f5-96b9-24f7486ff607-config-data\") pod \"nova-api-0\" (UID: \"d35e97cf-abdb-42f5-96b9-24f7486ff607\") " pod="openstack/nova-api-0" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.897650 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/242276fc-0e87-4b68-b540-a0ea131df85a-config-data\") pod \"nova-scheduler-0\" (UID: \"242276fc-0e87-4b68-b540-a0ea131df85a\") " pod="openstack/nova-scheduler-0" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.914515 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4lqcl\" (UniqueName: \"kubernetes.io/projected/242276fc-0e87-4b68-b540-a0ea131df85a-kube-api-access-4lqcl\") pod \"nova-scheduler-0\" (UID: \"242276fc-0e87-4b68-b540-a0ea131df85a\") " pod="openstack/nova-scheduler-0" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.916117 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-72xx6\" (UniqueName: \"kubernetes.io/projected/d35e97cf-abdb-42f5-96b9-24f7486ff607-kube-api-access-72xx6\") pod \"nova-api-0\" (UID: \"d35e97cf-abdb-42f5-96b9-24f7486ff607\") " pod="openstack/nova-api-0" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.933856 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 24 01:30:51 crc kubenswrapper[4755]: I1124 01:30:51.959070 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 24 01:30:52 crc kubenswrapper[4755]: I1124 01:30:52.010773 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f1d611ec-f141-4525-b307-89e15d639897" path="/var/lib/kubelet/pods/f1d611ec-f141-4525-b307-89e15d639897/volumes" Nov 24 01:30:52 crc kubenswrapper[4755]: I1124 01:30:52.012196 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc9a95c5-01c8-4034-a79e-0d1714219a17" path="/var/lib/kubelet/pods/fc9a95c5-01c8-4034-a79e-0d1714219a17/volumes" Nov 24 01:30:52 crc kubenswrapper[4755]: I1124 01:30:52.353852 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 24 01:30:52 crc kubenswrapper[4755]: W1124 01:30:52.360497 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod242276fc_0e87_4b68_b540_a0ea131df85a.slice/crio-a00719762059e9dcee09e20677122b9156e36f19529fda5438c14f74622be166 WatchSource:0}: Error finding container a00719762059e9dcee09e20677122b9156e36f19529fda5438c14f74622be166: Status 404 returned error can't find the container with id a00719762059e9dcee09e20677122b9156e36f19529fda5438c14f74622be166 Nov 24 01:30:52 crc kubenswrapper[4755]: I1124 01:30:52.438441 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 24 01:30:52 crc kubenswrapper[4755]: W1124 01:30:52.441266 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd35e97cf_abdb_42f5_96b9_24f7486ff607.slice/crio-5752f1b6f39cda7f1b2995e3cca71d31e54ea94e9383534ac89b0b1b39d8359d WatchSource:0}: Error finding container 5752f1b6f39cda7f1b2995e3cca71d31e54ea94e9383534ac89b0b1b39d8359d: Status 404 returned error can't find the container with id 5752f1b6f39cda7f1b2995e3cca71d31e54ea94e9383534ac89b0b1b39d8359d Nov 24 01:30:52 crc kubenswrapper[4755]: I1124 01:30:52.497677 4755 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d35e97cf-abdb-42f5-96b9-24f7486ff607","Type":"ContainerStarted","Data":"5752f1b6f39cda7f1b2995e3cca71d31e54ea94e9383534ac89b0b1b39d8359d"} Nov 24 01:30:52 crc kubenswrapper[4755]: I1124 01:30:52.500294 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"242276fc-0e87-4b68-b540-a0ea131df85a","Type":"ContainerStarted","Data":"a00719762059e9dcee09e20677122b9156e36f19529fda5438c14f74622be166"} Nov 24 01:30:53 crc kubenswrapper[4755]: I1124 01:30:53.515796 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d35e97cf-abdb-42f5-96b9-24f7486ff607","Type":"ContainerStarted","Data":"a564d947ea0b8b1bc6aa028e37fa31687e61042c40f8bc202bdba7497c45f340"} Nov 24 01:30:53 crc kubenswrapper[4755]: I1124 01:30:53.516170 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d35e97cf-abdb-42f5-96b9-24f7486ff607","Type":"ContainerStarted","Data":"dabbe714b8ac62b23fb17055bf96606a6cfe4a9424beadcc7ea865e32143bdb5"} Nov 24 01:30:53 crc kubenswrapper[4755]: I1124 01:30:53.517979 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"242276fc-0e87-4b68-b540-a0ea131df85a","Type":"ContainerStarted","Data":"b81c4205d29c51292b87d4b596c9017cc12adde42a0637cffab12ff160c3df74"} Nov 24 01:30:53 crc kubenswrapper[4755]: I1124 01:30:53.545328 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.545293474 podStartE2EDuration="2.545293474s" podCreationTimestamp="2025-11-24 01:30:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:30:53.540381649 +0000 UTC m=+1078.226447200" watchObservedRunningTime="2025-11-24 01:30:53.545293474 +0000 UTC m=+1078.231359015" Nov 24 01:30:53 crc kubenswrapper[4755]: I1124 01:30:53.575899 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.575876688 podStartE2EDuration="2.575876688s" podCreationTimestamp="2025-11-24 01:30:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:30:53.563240889 +0000 UTC m=+1078.249306430" watchObservedRunningTime="2025-11-24 01:30:53.575876688 +0000 UTC m=+1078.261942199" Nov 24 01:30:55 crc kubenswrapper[4755]: I1124 01:30:55.753404 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Nov 24 01:30:56 crc kubenswrapper[4755]: I1124 01:30:56.758435 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 24 01:30:56 crc kubenswrapper[4755]: I1124 01:30:56.758887 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 24 01:30:56 crc kubenswrapper[4755]: I1124 01:30:56.935000 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Nov 24 01:30:57 crc kubenswrapper[4755]: I1124 01:30:57.774770 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="a8d01eee-3c39-4dee-be99-74c8528f4516" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.194:8775/\": context deadline exceeded (Client.Timeout 
exceeded while awaiting headers)" Nov 24 01:30:57 crc kubenswrapper[4755]: I1124 01:30:57.775005 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="a8d01eee-3c39-4dee-be99-74c8528f4516" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.194:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 24 01:31:01 crc kubenswrapper[4755]: I1124 01:31:01.935109 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Nov 24 01:31:01 crc kubenswrapper[4755]: I1124 01:31:01.960379 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 24 01:31:01 crc kubenswrapper[4755]: I1124 01:31:01.960425 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 24 01:31:01 crc kubenswrapper[4755]: I1124 01:31:01.970221 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Nov 24 01:31:02 crc kubenswrapper[4755]: I1124 01:31:02.653822 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Nov 24 01:31:03 crc kubenswrapper[4755]: I1124 01:31:03.044933 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="d35e97cf-abdb-42f5-96b9-24f7486ff607" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.196:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 24 01:31:03 crc kubenswrapper[4755]: I1124 01:31:03.045223 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="d35e97cf-abdb-42f5-96b9-24f7486ff607" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.196:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 24 01:31:03 crc kubenswrapper[4755]: I1124 01:31:03.357414 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Nov 24 01:31:06 crc kubenswrapper[4755]: I1124 01:31:06.764726 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Nov 24 01:31:06 crc kubenswrapper[4755]: I1124 01:31:06.766749 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Nov 24 01:31:06 crc kubenswrapper[4755]: I1124 01:31:06.777620 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Nov 24 01:31:06 crc kubenswrapper[4755]: I1124 01:31:06.822718 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 24 01:31:06 crc kubenswrapper[4755]: I1124 01:31:06.823176 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="907891d0-296d-4b4b-a3f3-867979467a98" containerName="kube-state-metrics" containerID="cri-o://4ac094ba5ae195d237e03177ca3743fa1216f2187e4274d4ceeae0a845aba5d0" gracePeriod=30 Nov 24 01:31:07 crc kubenswrapper[4755]: I1124 01:31:07.322695 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Nov 24 01:31:07 crc kubenswrapper[4755]: I1124 01:31:07.519899 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cwzn9\" (UniqueName: \"kubernetes.io/projected/907891d0-296d-4b4b-a3f3-867979467a98-kube-api-access-cwzn9\") pod \"907891d0-296d-4b4b-a3f3-867979467a98\" (UID: \"907891d0-296d-4b4b-a3f3-867979467a98\") " Nov 24 01:31:07 crc kubenswrapper[4755]: I1124 01:31:07.538848 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/907891d0-296d-4b4b-a3f3-867979467a98-kube-api-access-cwzn9" (OuterVolumeSpecName: "kube-api-access-cwzn9") pod "907891d0-296d-4b4b-a3f3-867979467a98" (UID: "907891d0-296d-4b4b-a3f3-867979467a98"). InnerVolumeSpecName "kube-api-access-cwzn9". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:31:07 crc kubenswrapper[4755]: I1124 01:31:07.621965 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cwzn9\" (UniqueName: \"kubernetes.io/projected/907891d0-296d-4b4b-a3f3-867979467a98-kube-api-access-cwzn9\") on node \"crc\" DevicePath \"\"" Nov 24 01:31:07 crc kubenswrapper[4755]: I1124 01:31:07.660133 4755 generic.go:334] "Generic (PLEG): container finished" podID="907891d0-296d-4b4b-a3f3-867979467a98" containerID="4ac094ba5ae195d237e03177ca3743fa1216f2187e4274d4ceeae0a845aba5d0" exitCode=2 Nov 24 01:31:07 crc kubenswrapper[4755]: I1124 01:31:07.660188 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Nov 24 01:31:07 crc kubenswrapper[4755]: I1124 01:31:07.660241 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"907891d0-296d-4b4b-a3f3-867979467a98","Type":"ContainerDied","Data":"4ac094ba5ae195d237e03177ca3743fa1216f2187e4274d4ceeae0a845aba5d0"} Nov 24 01:31:07 crc kubenswrapper[4755]: I1124 01:31:07.660278 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"907891d0-296d-4b4b-a3f3-867979467a98","Type":"ContainerDied","Data":"afda6f8db1d66ff731d489ae5f5c2f8026bb97f42cd3745e3f28ac71ca3d0f0e"} Nov 24 01:31:07 crc kubenswrapper[4755]: I1124 01:31:07.660302 4755 scope.go:117] "RemoveContainer" containerID="4ac094ba5ae195d237e03177ca3743fa1216f2187e4274d4ceeae0a845aba5d0" Nov 24 01:31:07 crc kubenswrapper[4755]: I1124 01:31:07.667960 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Nov 24 01:31:07 crc kubenswrapper[4755]: I1124 01:31:07.687775 4755 scope.go:117] "RemoveContainer" containerID="4ac094ba5ae195d237e03177ca3743fa1216f2187e4274d4ceeae0a845aba5d0" Nov 24 01:31:07 crc kubenswrapper[4755]: E1124 01:31:07.688251 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4ac094ba5ae195d237e03177ca3743fa1216f2187e4274d4ceeae0a845aba5d0\": container with ID starting with 4ac094ba5ae195d237e03177ca3743fa1216f2187e4274d4ceeae0a845aba5d0 not found: ID does not exist" containerID="4ac094ba5ae195d237e03177ca3743fa1216f2187e4274d4ceeae0a845aba5d0" Nov 24 01:31:07 crc kubenswrapper[4755]: I1124 01:31:07.688291 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ac094ba5ae195d237e03177ca3743fa1216f2187e4274d4ceeae0a845aba5d0"} err="failed to get container status \"4ac094ba5ae195d237e03177ca3743fa1216f2187e4274d4ceeae0a845aba5d0\": rpc 
error: code = NotFound desc = could not find container \"4ac094ba5ae195d237e03177ca3743fa1216f2187e4274d4ceeae0a845aba5d0\": container with ID starting with 4ac094ba5ae195d237e03177ca3743fa1216f2187e4274d4ceeae0a845aba5d0 not found: ID does not exist" Nov 24 01:31:07 crc kubenswrapper[4755]: I1124 01:31:07.719620 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 24 01:31:07 crc kubenswrapper[4755]: I1124 01:31:07.735277 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 24 01:31:07 crc kubenswrapper[4755]: I1124 01:31:07.746992 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Nov 24 01:31:07 crc kubenswrapper[4755]: E1124 01:31:07.747498 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="907891d0-296d-4b4b-a3f3-867979467a98" containerName="kube-state-metrics" Nov 24 01:31:07 crc kubenswrapper[4755]: I1124 01:31:07.747521 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="907891d0-296d-4b4b-a3f3-867979467a98" containerName="kube-state-metrics" Nov 24 01:31:07 crc kubenswrapper[4755]: I1124 01:31:07.747760 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="907891d0-296d-4b4b-a3f3-867979467a98" containerName="kube-state-metrics" Nov 24 01:31:07 crc kubenswrapper[4755]: I1124 01:31:07.748543 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Nov 24 01:31:07 crc kubenswrapper[4755]: I1124 01:31:07.754743 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Nov 24 01:31:07 crc kubenswrapper[4755]: I1124 01:31:07.754775 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Nov 24 01:31:07 crc kubenswrapper[4755]: I1124 01:31:07.770397 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 24 01:31:07 crc kubenswrapper[4755]: I1124 01:31:07.927822 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f5l7m\" (UniqueName: \"kubernetes.io/projected/c8452109-2acc-4f1b-848f-e1b5cb87590d-kube-api-access-f5l7m\") pod \"kube-state-metrics-0\" (UID: \"c8452109-2acc-4f1b-848f-e1b5cb87590d\") " pod="openstack/kube-state-metrics-0" Nov 24 01:31:07 crc kubenswrapper[4755]: I1124 01:31:07.927935 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/c8452109-2acc-4f1b-848f-e1b5cb87590d-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"c8452109-2acc-4f1b-848f-e1b5cb87590d\") " pod="openstack/kube-state-metrics-0" Nov 24 01:31:07 crc kubenswrapper[4755]: I1124 01:31:07.928036 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/c8452109-2acc-4f1b-848f-e1b5cb87590d-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"c8452109-2acc-4f1b-848f-e1b5cb87590d\") " pod="openstack/kube-state-metrics-0" Nov 24 01:31:07 crc kubenswrapper[4755]: I1124 01:31:07.928081 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8452109-2acc-4f1b-848f-e1b5cb87590d-combined-ca-bundle\") pod 
\"kube-state-metrics-0\" (UID: \"c8452109-2acc-4f1b-848f-e1b5cb87590d\") " pod="openstack/kube-state-metrics-0" Nov 24 01:31:08 crc kubenswrapper[4755]: I1124 01:31:08.009172 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="907891d0-296d-4b4b-a3f3-867979467a98" path="/var/lib/kubelet/pods/907891d0-296d-4b4b-a3f3-867979467a98/volumes" Nov 24 01:31:08 crc kubenswrapper[4755]: I1124 01:31:08.030699 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/c8452109-2acc-4f1b-848f-e1b5cb87590d-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"c8452109-2acc-4f1b-848f-e1b5cb87590d\") " pod="openstack/kube-state-metrics-0" Nov 24 01:31:08 crc kubenswrapper[4755]: I1124 01:31:08.030869 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8452109-2acc-4f1b-848f-e1b5cb87590d-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"c8452109-2acc-4f1b-848f-e1b5cb87590d\") " pod="openstack/kube-state-metrics-0" Nov 24 01:31:08 crc kubenswrapper[4755]: I1124 01:31:08.031003 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f5l7m\" (UniqueName: \"kubernetes.io/projected/c8452109-2acc-4f1b-848f-e1b5cb87590d-kube-api-access-f5l7m\") pod \"kube-state-metrics-0\" (UID: \"c8452109-2acc-4f1b-848f-e1b5cb87590d\") " pod="openstack/kube-state-metrics-0" Nov 24 01:31:08 crc kubenswrapper[4755]: I1124 01:31:08.031131 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/c8452109-2acc-4f1b-848f-e1b5cb87590d-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"c8452109-2acc-4f1b-848f-e1b5cb87590d\") " pod="openstack/kube-state-metrics-0" Nov 24 01:31:08 crc kubenswrapper[4755]: I1124 01:31:08.036047 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/c8452109-2acc-4f1b-848f-e1b5cb87590d-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"c8452109-2acc-4f1b-848f-e1b5cb87590d\") " pod="openstack/kube-state-metrics-0" Nov 24 01:31:08 crc kubenswrapper[4755]: I1124 01:31:08.036113 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/c8452109-2acc-4f1b-848f-e1b5cb87590d-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"c8452109-2acc-4f1b-848f-e1b5cb87590d\") " pod="openstack/kube-state-metrics-0" Nov 24 01:31:08 crc kubenswrapper[4755]: I1124 01:31:08.036737 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8452109-2acc-4f1b-848f-e1b5cb87590d-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"c8452109-2acc-4f1b-848f-e1b5cb87590d\") " pod="openstack/kube-state-metrics-0" Nov 24 01:31:08 crc kubenswrapper[4755]: I1124 01:31:08.050709 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f5l7m\" (UniqueName: \"kubernetes.io/projected/c8452109-2acc-4f1b-848f-e1b5cb87590d-kube-api-access-f5l7m\") pod \"kube-state-metrics-0\" (UID: \"c8452109-2acc-4f1b-848f-e1b5cb87590d\") " pod="openstack/kube-state-metrics-0" Nov 24 01:31:08 crc kubenswrapper[4755]: I1124 01:31:08.066429 4755 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Nov 24 01:31:08 crc kubenswrapper[4755]: I1124 01:31:08.601484 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 24 01:31:08 crc kubenswrapper[4755]: I1124 01:31:08.688125 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"c8452109-2acc-4f1b-848f-e1b5cb87590d","Type":"ContainerStarted","Data":"e4df984b087a9efc5cef814ac73acbf77459345cb0564599f7c0a2a92344ad7d"} Nov 24 01:31:08 crc kubenswrapper[4755]: I1124 01:31:08.876911 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 24 01:31:08 crc kubenswrapper[4755]: I1124 01:31:08.877223 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5de632b3-5130-4cd7-95e5-dbfde5d6738a" containerName="ceilometer-central-agent" containerID="cri-o://a72798220c2d6cabfcf4ee702fcde40865f698f9d3dc29a35135078803dc8271" gracePeriod=30 Nov 24 01:31:08 crc kubenswrapper[4755]: I1124 01:31:08.877374 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5de632b3-5130-4cd7-95e5-dbfde5d6738a" containerName="proxy-httpd" containerID="cri-o://12aacff969ea9cda5b9f3874c40796495311edc92ee29e228269094a16a797ff" gracePeriod=30 Nov 24 01:31:08 crc kubenswrapper[4755]: I1124 01:31:08.877427 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5de632b3-5130-4cd7-95e5-dbfde5d6738a" containerName="sg-core" containerID="cri-o://a9465e67dc5ac2c0a71160dfc5078323846c8e6ec72d2d04aa4ed06f05875511" gracePeriod=30 Nov 24 01:31:08 crc kubenswrapper[4755]: I1124 01:31:08.877467 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5de632b3-5130-4cd7-95e5-dbfde5d6738a" containerName="ceilometer-notification-agent" containerID="cri-o://2c69fe56e8872359f6380f1e23d3e7fc270b5d179950e8bbbc6322cd8955a275" gracePeriod=30 Nov 24 01:31:09 crc kubenswrapper[4755]: I1124 01:31:09.541411 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 24 01:31:09 crc kubenswrapper[4755]: I1124 01:31:09.664896 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8647003e-d511-4e5b-9fe4-86049ef105e8-combined-ca-bundle\") pod \"8647003e-d511-4e5b-9fe4-86049ef105e8\" (UID: \"8647003e-d511-4e5b-9fe4-86049ef105e8\") " Nov 24 01:31:09 crc kubenswrapper[4755]: I1124 01:31:09.664967 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wgjpm\" (UniqueName: \"kubernetes.io/projected/8647003e-d511-4e5b-9fe4-86049ef105e8-kube-api-access-wgjpm\") pod \"8647003e-d511-4e5b-9fe4-86049ef105e8\" (UID: \"8647003e-d511-4e5b-9fe4-86049ef105e8\") " Nov 24 01:31:09 crc kubenswrapper[4755]: I1124 01:31:09.665196 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8647003e-d511-4e5b-9fe4-86049ef105e8-config-data\") pod \"8647003e-d511-4e5b-9fe4-86049ef105e8\" (UID: \"8647003e-d511-4e5b-9fe4-86049ef105e8\") " Nov 24 01:31:09 crc kubenswrapper[4755]: I1124 01:31:09.670987 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8647003e-d511-4e5b-9fe4-86049ef105e8-kube-api-access-wgjpm" (OuterVolumeSpecName: "kube-api-access-wgjpm") pod "8647003e-d511-4e5b-9fe4-86049ef105e8" (UID: "8647003e-d511-4e5b-9fe4-86049ef105e8"). InnerVolumeSpecName "kube-api-access-wgjpm". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:31:09 crc kubenswrapper[4755]: I1124 01:31:09.693179 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8647003e-d511-4e5b-9fe4-86049ef105e8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8647003e-d511-4e5b-9fe4-86049ef105e8" (UID: "8647003e-d511-4e5b-9fe4-86049ef105e8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:31:09 crc kubenswrapper[4755]: I1124 01:31:09.698198 4755 generic.go:334] "Generic (PLEG): container finished" podID="8647003e-d511-4e5b-9fe4-86049ef105e8" containerID="8a6317f2863876d0418c3b1c94446feadb01e8cafddbc6ca00fbfde2853c30f2" exitCode=137 Nov 24 01:31:09 crc kubenswrapper[4755]: I1124 01:31:09.698238 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"8647003e-d511-4e5b-9fe4-86049ef105e8","Type":"ContainerDied","Data":"8a6317f2863876d0418c3b1c94446feadb01e8cafddbc6ca00fbfde2853c30f2"} Nov 24 01:31:09 crc kubenswrapper[4755]: I1124 01:31:09.698272 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 24 01:31:09 crc kubenswrapper[4755]: I1124 01:31:09.698290 4755 scope.go:117] "RemoveContainer" containerID="8a6317f2863876d0418c3b1c94446feadb01e8cafddbc6ca00fbfde2853c30f2" Nov 24 01:31:09 crc kubenswrapper[4755]: I1124 01:31:09.698275 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"8647003e-d511-4e5b-9fe4-86049ef105e8","Type":"ContainerDied","Data":"599c3ef7517e82e096d2ff4ea0221c6eaf2c25075a771bc897ee99d4d6ce2e5a"} Nov 24 01:31:09 crc kubenswrapper[4755]: I1124 01:31:09.699555 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"c8452109-2acc-4f1b-848f-e1b5cb87590d","Type":"ContainerStarted","Data":"31801f9e9e229e965ae7d00e65c52ec48ce5fcd237fd0d91fa1562cf90c1fa3e"} Nov 24 01:31:09 crc kubenswrapper[4755]: I1124 01:31:09.699646 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Nov 24 01:31:09 crc kubenswrapper[4755]: I1124 01:31:09.702857 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8647003e-d511-4e5b-9fe4-86049ef105e8-config-data" (OuterVolumeSpecName: "config-data") pod "8647003e-d511-4e5b-9fe4-86049ef105e8" (UID: "8647003e-d511-4e5b-9fe4-86049ef105e8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:31:09 crc kubenswrapper[4755]: I1124 01:31:09.707738 4755 generic.go:334] "Generic (PLEG): container finished" podID="5de632b3-5130-4cd7-95e5-dbfde5d6738a" containerID="12aacff969ea9cda5b9f3874c40796495311edc92ee29e228269094a16a797ff" exitCode=0 Nov 24 01:31:09 crc kubenswrapper[4755]: I1124 01:31:09.707762 4755 generic.go:334] "Generic (PLEG): container finished" podID="5de632b3-5130-4cd7-95e5-dbfde5d6738a" containerID="a9465e67dc5ac2c0a71160dfc5078323846c8e6ec72d2d04aa4ed06f05875511" exitCode=2 Nov 24 01:31:09 crc kubenswrapper[4755]: I1124 01:31:09.707769 4755 generic.go:334] "Generic (PLEG): container finished" podID="5de632b3-5130-4cd7-95e5-dbfde5d6738a" containerID="a72798220c2d6cabfcf4ee702fcde40865f698f9d3dc29a35135078803dc8271" exitCode=0 Nov 24 01:31:09 crc kubenswrapper[4755]: I1124 01:31:09.708635 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5de632b3-5130-4cd7-95e5-dbfde5d6738a","Type":"ContainerDied","Data":"12aacff969ea9cda5b9f3874c40796495311edc92ee29e228269094a16a797ff"} Nov 24 01:31:09 crc kubenswrapper[4755]: I1124 01:31:09.708669 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5de632b3-5130-4cd7-95e5-dbfde5d6738a","Type":"ContainerDied","Data":"a9465e67dc5ac2c0a71160dfc5078323846c8e6ec72d2d04aa4ed06f05875511"} Nov 24 01:31:09 crc kubenswrapper[4755]: I1124 01:31:09.708679 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5de632b3-5130-4cd7-95e5-dbfde5d6738a","Type":"ContainerDied","Data":"a72798220c2d6cabfcf4ee702fcde40865f698f9d3dc29a35135078803dc8271"} Nov 24 01:31:09 crc kubenswrapper[4755]: I1124 01:31:09.727080 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.370387068 podStartE2EDuration="2.727060379s" podCreationTimestamp="2025-11-24 01:31:07 +0000 UTC" firstStartedPulling="2025-11-24 01:31:08.615752857 +0000 UTC m=+1093.301818358" lastFinishedPulling="2025-11-24 01:31:08.972426168 +0000 UTC 
m=+1093.658491669" observedRunningTime="2025-11-24 01:31:09.72201675 +0000 UTC m=+1094.408082261" watchObservedRunningTime="2025-11-24 01:31:09.727060379 +0000 UTC m=+1094.413125880" Nov 24 01:31:09 crc kubenswrapper[4755]: I1124 01:31:09.736752 4755 scope.go:117] "RemoveContainer" containerID="8a6317f2863876d0418c3b1c94446feadb01e8cafddbc6ca00fbfde2853c30f2" Nov 24 01:31:09 crc kubenswrapper[4755]: E1124 01:31:09.737260 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a6317f2863876d0418c3b1c94446feadb01e8cafddbc6ca00fbfde2853c30f2\": container with ID starting with 8a6317f2863876d0418c3b1c94446feadb01e8cafddbc6ca00fbfde2853c30f2 not found: ID does not exist" containerID="8a6317f2863876d0418c3b1c94446feadb01e8cafddbc6ca00fbfde2853c30f2" Nov 24 01:31:09 crc kubenswrapper[4755]: I1124 01:31:09.737298 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a6317f2863876d0418c3b1c94446feadb01e8cafddbc6ca00fbfde2853c30f2"} err="failed to get container status \"8a6317f2863876d0418c3b1c94446feadb01e8cafddbc6ca00fbfde2853c30f2\": rpc error: code = NotFound desc = could not find container \"8a6317f2863876d0418c3b1c94446feadb01e8cafddbc6ca00fbfde2853c30f2\": container with ID starting with 8a6317f2863876d0418c3b1c94446feadb01e8cafddbc6ca00fbfde2853c30f2 not found: ID does not exist" Nov 24 01:31:09 crc kubenswrapper[4755]: I1124 01:31:09.767485 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8647003e-d511-4e5b-9fe4-86049ef105e8-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 01:31:09 crc kubenswrapper[4755]: I1124 01:31:09.767534 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8647003e-d511-4e5b-9fe4-86049ef105e8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:31:09 crc kubenswrapper[4755]: I1124 01:31:09.767548 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wgjpm\" (UniqueName: \"kubernetes.io/projected/8647003e-d511-4e5b-9fe4-86049ef105e8-kube-api-access-wgjpm\") on node \"crc\" DevicePath \"\"" Nov 24 01:31:10 crc kubenswrapper[4755]: I1124 01:31:10.040209 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 24 01:31:10 crc kubenswrapper[4755]: I1124 01:31:10.057662 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 24 01:31:10 crc kubenswrapper[4755]: I1124 01:31:10.067106 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 24 01:31:10 crc kubenswrapper[4755]: E1124 01:31:10.067548 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8647003e-d511-4e5b-9fe4-86049ef105e8" containerName="nova-cell1-novncproxy-novncproxy" Nov 24 01:31:10 crc kubenswrapper[4755]: I1124 01:31:10.067567 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="8647003e-d511-4e5b-9fe4-86049ef105e8" containerName="nova-cell1-novncproxy-novncproxy" Nov 24 01:31:10 crc kubenswrapper[4755]: I1124 01:31:10.067805 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="8647003e-d511-4e5b-9fe4-86049ef105e8" containerName="nova-cell1-novncproxy-novncproxy" Nov 24 01:31:10 crc kubenswrapper[4755]: I1124 01:31:10.068435 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 24 01:31:10 crc kubenswrapper[4755]: I1124 01:31:10.072036 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Nov 24 01:31:10 crc kubenswrapper[4755]: I1124 01:31:10.072389 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Nov 24 01:31:10 crc kubenswrapper[4755]: I1124 01:31:10.072545 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Nov 24 01:31:10 crc kubenswrapper[4755]: I1124 01:31:10.073033 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/0fbd9862-f65c-4c62-8701-83a23ce4211f-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"0fbd9862-f65c-4c62-8701-83a23ce4211f\") " pod="openstack/nova-cell1-novncproxy-0" Nov 24 01:31:10 crc kubenswrapper[4755]: I1124 01:31:10.073120 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fbd9862-f65c-4c62-8701-83a23ce4211f-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"0fbd9862-f65c-4c62-8701-83a23ce4211f\") " pod="openstack/nova-cell1-novncproxy-0" Nov 24 01:31:10 crc kubenswrapper[4755]: I1124 01:31:10.073142 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0fbd9862-f65c-4c62-8701-83a23ce4211f-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"0fbd9862-f65c-4c62-8701-83a23ce4211f\") " pod="openstack/nova-cell1-novncproxy-0" Nov 24 01:31:10 crc kubenswrapper[4755]: I1124 01:31:10.073163 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mj2mr\" (UniqueName: \"kubernetes.io/projected/0fbd9862-f65c-4c62-8701-83a23ce4211f-kube-api-access-mj2mr\") pod \"nova-cell1-novncproxy-0\" (UID: \"0fbd9862-f65c-4c62-8701-83a23ce4211f\") " pod="openstack/nova-cell1-novncproxy-0" Nov 24 01:31:10 crc kubenswrapper[4755]: I1124 01:31:10.073250 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/0fbd9862-f65c-4c62-8701-83a23ce4211f-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"0fbd9862-f65c-4c62-8701-83a23ce4211f\") " pod="openstack/nova-cell1-novncproxy-0" Nov 24 01:31:10 crc kubenswrapper[4755]: I1124 01:31:10.077101 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 24 01:31:10 crc kubenswrapper[4755]: I1124 01:31:10.175137 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/0fbd9862-f65c-4c62-8701-83a23ce4211f-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"0fbd9862-f65c-4c62-8701-83a23ce4211f\") " pod="openstack/nova-cell1-novncproxy-0" Nov 24 01:31:10 crc kubenswrapper[4755]: I1124 01:31:10.175475 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fbd9862-f65c-4c62-8701-83a23ce4211f-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"0fbd9862-f65c-4c62-8701-83a23ce4211f\") " 
pod="openstack/nova-cell1-novncproxy-0" Nov 24 01:31:10 crc kubenswrapper[4755]: I1124 01:31:10.175500 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0fbd9862-f65c-4c62-8701-83a23ce4211f-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"0fbd9862-f65c-4c62-8701-83a23ce4211f\") " pod="openstack/nova-cell1-novncproxy-0" Nov 24 01:31:10 crc kubenswrapper[4755]: I1124 01:31:10.175526 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mj2mr\" (UniqueName: \"kubernetes.io/projected/0fbd9862-f65c-4c62-8701-83a23ce4211f-kube-api-access-mj2mr\") pod \"nova-cell1-novncproxy-0\" (UID: \"0fbd9862-f65c-4c62-8701-83a23ce4211f\") " pod="openstack/nova-cell1-novncproxy-0" Nov 24 01:31:10 crc kubenswrapper[4755]: I1124 01:31:10.175567 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/0fbd9862-f65c-4c62-8701-83a23ce4211f-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"0fbd9862-f65c-4c62-8701-83a23ce4211f\") " pod="openstack/nova-cell1-novncproxy-0" Nov 24 01:31:10 crc kubenswrapper[4755]: I1124 01:31:10.180393 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0fbd9862-f65c-4c62-8701-83a23ce4211f-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"0fbd9862-f65c-4c62-8701-83a23ce4211f\") " pod="openstack/nova-cell1-novncproxy-0" Nov 24 01:31:10 crc kubenswrapper[4755]: I1124 01:31:10.180822 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/0fbd9862-f65c-4c62-8701-83a23ce4211f-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"0fbd9862-f65c-4c62-8701-83a23ce4211f\") " pod="openstack/nova-cell1-novncproxy-0" Nov 24 01:31:10 crc kubenswrapper[4755]: I1124 01:31:10.181344 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/0fbd9862-f65c-4c62-8701-83a23ce4211f-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"0fbd9862-f65c-4c62-8701-83a23ce4211f\") " pod="openstack/nova-cell1-novncproxy-0" Nov 24 01:31:10 crc kubenswrapper[4755]: I1124 01:31:10.187147 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fbd9862-f65c-4c62-8701-83a23ce4211f-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"0fbd9862-f65c-4c62-8701-83a23ce4211f\") " pod="openstack/nova-cell1-novncproxy-0" Nov 24 01:31:10 crc kubenswrapper[4755]: I1124 01:31:10.193900 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mj2mr\" (UniqueName: \"kubernetes.io/projected/0fbd9862-f65c-4c62-8701-83a23ce4211f-kube-api-access-mj2mr\") pod \"nova-cell1-novncproxy-0\" (UID: \"0fbd9862-f65c-4c62-8701-83a23ce4211f\") " pod="openstack/nova-cell1-novncproxy-0" Nov 24 01:31:10 crc kubenswrapper[4755]: I1124 01:31:10.415190 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 24 01:31:10 crc kubenswrapper[4755]: I1124 01:31:10.853348 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 24 01:31:10 crc kubenswrapper[4755]: W1124 01:31:10.854201 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0fbd9862_f65c_4c62_8701_83a23ce4211f.slice/crio-f2700e9082fff92417354cd33231ad634f671ae33289b5229f8c1bda180086e5 WatchSource:0}: Error finding container f2700e9082fff92417354cd33231ad634f671ae33289b5229f8c1bda180086e5: Status 404 returned error can't find the container with id f2700e9082fff92417354cd33231ad634f671ae33289b5229f8c1bda180086e5 Nov 24 01:31:11 crc kubenswrapper[4755]: I1124 01:31:11.729383 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"0fbd9862-f65c-4c62-8701-83a23ce4211f","Type":"ContainerStarted","Data":"9f41139e2d2ae3bd336ca447ea63e230ef75dea8db3f103720b3a57a778c3564"} Nov 24 01:31:11 crc kubenswrapper[4755]: I1124 01:31:11.730165 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"0fbd9862-f65c-4c62-8701-83a23ce4211f","Type":"ContainerStarted","Data":"f2700e9082fff92417354cd33231ad634f671ae33289b5229f8c1bda180086e5"} Nov 24 01:31:11 crc kubenswrapper[4755]: I1124 01:31:11.769505 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=1.769482673 podStartE2EDuration="1.769482673s" podCreationTimestamp="2025-11-24 01:31:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:31:11.755992021 +0000 UTC m=+1096.442057562" watchObservedRunningTime="2025-11-24 01:31:11.769482673 +0000 UTC m=+1096.455548184" Nov 24 01:31:11 crc kubenswrapper[4755]: I1124 01:31:11.966209 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Nov 24 01:31:11 crc kubenswrapper[4755]: I1124 01:31:11.967049 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Nov 24 01:31:11 crc kubenswrapper[4755]: I1124 01:31:11.970513 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Nov 24 01:31:11 crc kubenswrapper[4755]: I1124 01:31:11.975117 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.006177 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8647003e-d511-4e5b-9fe4-86049ef105e8" path="/var/lib/kubelet/pods/8647003e-d511-4e5b-9fe4-86049ef105e8/volumes" Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.681670 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.741146 4755 generic.go:334] "Generic (PLEG): container finished" podID="5de632b3-5130-4cd7-95e5-dbfde5d6738a" containerID="2c69fe56e8872359f6380f1e23d3e7fc270b5d179950e8bbbc6322cd8955a275" exitCode=0 Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.741216 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.741310 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5de632b3-5130-4cd7-95e5-dbfde5d6738a","Type":"ContainerDied","Data":"2c69fe56e8872359f6380f1e23d3e7fc270b5d179950e8bbbc6322cd8955a275"} Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.741351 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5de632b3-5130-4cd7-95e5-dbfde5d6738a","Type":"ContainerDied","Data":"ba3b312866cfaf2929e718a5a0c21e243b8cf6cab7e42ad56ef9298fb8340127"} Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.741879 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.741409 4755 scope.go:117] "RemoveContainer" containerID="12aacff969ea9cda5b9f3874c40796495311edc92ee29e228269094a16a797ff" Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.752138 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.765836 4755 scope.go:117] "RemoveContainer" containerID="a9465e67dc5ac2c0a71160dfc5078323846c8e6ec72d2d04aa4ed06f05875511" Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.824186 4755 scope.go:117] "RemoveContainer" containerID="2c69fe56e8872359f6380f1e23d3e7fc270b5d179950e8bbbc6322cd8955a275" Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.839158 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5de632b3-5130-4cd7-95e5-dbfde5d6738a-config-data\") pod \"5de632b3-5130-4cd7-95e5-dbfde5d6738a\" (UID: \"5de632b3-5130-4cd7-95e5-dbfde5d6738a\") " Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.839475 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5de632b3-5130-4cd7-95e5-dbfde5d6738a-run-httpd\") pod \"5de632b3-5130-4cd7-95e5-dbfde5d6738a\" (UID: \"5de632b3-5130-4cd7-95e5-dbfde5d6738a\") " Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.839631 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5de632b3-5130-4cd7-95e5-dbfde5d6738a-scripts\") pod \"5de632b3-5130-4cd7-95e5-dbfde5d6738a\" (UID: \"5de632b3-5130-4cd7-95e5-dbfde5d6738a\") " Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.839779 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-czx2h\" (UniqueName: \"kubernetes.io/projected/5de632b3-5130-4cd7-95e5-dbfde5d6738a-kube-api-access-czx2h\") pod \"5de632b3-5130-4cd7-95e5-dbfde5d6738a\" (UID: \"5de632b3-5130-4cd7-95e5-dbfde5d6738a\") " Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.839904 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5de632b3-5130-4cd7-95e5-dbfde5d6738a-sg-core-conf-yaml\") pod \"5de632b3-5130-4cd7-95e5-dbfde5d6738a\" (UID: \"5de632b3-5130-4cd7-95e5-dbfde5d6738a\") " Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.840013 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5de632b3-5130-4cd7-95e5-dbfde5d6738a-log-httpd\") pod \"5de632b3-5130-4cd7-95e5-dbfde5d6738a\" 
(UID: \"5de632b3-5130-4cd7-95e5-dbfde5d6738a\") " Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.840164 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5de632b3-5130-4cd7-95e5-dbfde5d6738a-combined-ca-bundle\") pod \"5de632b3-5130-4cd7-95e5-dbfde5d6738a\" (UID: \"5de632b3-5130-4cd7-95e5-dbfde5d6738a\") " Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.843673 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5de632b3-5130-4cd7-95e5-dbfde5d6738a-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "5de632b3-5130-4cd7-95e5-dbfde5d6738a" (UID: "5de632b3-5130-4cd7-95e5-dbfde5d6738a"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.844202 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5de632b3-5130-4cd7-95e5-dbfde5d6738a-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "5de632b3-5130-4cd7-95e5-dbfde5d6738a" (UID: "5de632b3-5130-4cd7-95e5-dbfde5d6738a"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.848875 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5de632b3-5130-4cd7-95e5-dbfde5d6738a-scripts" (OuterVolumeSpecName: "scripts") pod "5de632b3-5130-4cd7-95e5-dbfde5d6738a" (UID: "5de632b3-5130-4cd7-95e5-dbfde5d6738a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.867802 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5de632b3-5130-4cd7-95e5-dbfde5d6738a-kube-api-access-czx2h" (OuterVolumeSpecName: "kube-api-access-czx2h") pod "5de632b3-5130-4cd7-95e5-dbfde5d6738a" (UID: "5de632b3-5130-4cd7-95e5-dbfde5d6738a"). InnerVolumeSpecName "kube-api-access-czx2h". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.875307 4755 scope.go:117] "RemoveContainer" containerID="a72798220c2d6cabfcf4ee702fcde40865f698f9d3dc29a35135078803dc8271" Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.936461 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5de632b3-5130-4cd7-95e5-dbfde5d6738a-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "5de632b3-5130-4cd7-95e5-dbfde5d6738a" (UID: "5de632b3-5130-4cd7-95e5-dbfde5d6738a"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.942448 4755 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5de632b3-5130-4cd7-95e5-dbfde5d6738a-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.942492 4755 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5de632b3-5130-4cd7-95e5-dbfde5d6738a-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.942504 4755 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5de632b3-5130-4cd7-95e5-dbfde5d6738a-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.942515 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5de632b3-5130-4cd7-95e5-dbfde5d6738a-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.942530 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-czx2h\" (UniqueName: \"kubernetes.io/projected/5de632b3-5130-4cd7-95e5-dbfde5d6738a-kube-api-access-czx2h\") on node \"crc\" DevicePath \"\"" Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.949573 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-hgwb2"] Nov 24 01:31:12 crc kubenswrapper[4755]: E1124 01:31:12.950133 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5de632b3-5130-4cd7-95e5-dbfde5d6738a" containerName="sg-core" Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.950153 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="5de632b3-5130-4cd7-95e5-dbfde5d6738a" containerName="sg-core" Nov 24 01:31:12 crc kubenswrapper[4755]: E1124 01:31:12.950209 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5de632b3-5130-4cd7-95e5-dbfde5d6738a" containerName="ceilometer-notification-agent" Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.950218 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="5de632b3-5130-4cd7-95e5-dbfde5d6738a" containerName="ceilometer-notification-agent" Nov 24 01:31:12 crc kubenswrapper[4755]: E1124 01:31:12.950229 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5de632b3-5130-4cd7-95e5-dbfde5d6738a" containerName="proxy-httpd" Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.950236 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="5de632b3-5130-4cd7-95e5-dbfde5d6738a" containerName="proxy-httpd" Nov 24 01:31:12 crc kubenswrapper[4755]: E1124 01:31:12.950253 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5de632b3-5130-4cd7-95e5-dbfde5d6738a" containerName="ceilometer-central-agent" Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.950262 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="5de632b3-5130-4cd7-95e5-dbfde5d6738a" containerName="ceilometer-central-agent" Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.950486 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="5de632b3-5130-4cd7-95e5-dbfde5d6738a" containerName="ceilometer-central-agent" Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.950515 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="5de632b3-5130-4cd7-95e5-dbfde5d6738a" containerName="proxy-httpd" 
Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.950531 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="5de632b3-5130-4cd7-95e5-dbfde5d6738a" containerName="ceilometer-notification-agent" Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.950555 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="5de632b3-5130-4cd7-95e5-dbfde5d6738a" containerName="sg-core" Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.961335 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-hgwb2" Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.963320 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-hgwb2"] Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.978871 4755 scope.go:117] "RemoveContainer" containerID="12aacff969ea9cda5b9f3874c40796495311edc92ee29e228269094a16a797ff" Nov 24 01:31:12 crc kubenswrapper[4755]: E1124 01:31:12.979372 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"12aacff969ea9cda5b9f3874c40796495311edc92ee29e228269094a16a797ff\": container with ID starting with 12aacff969ea9cda5b9f3874c40796495311edc92ee29e228269094a16a797ff not found: ID does not exist" containerID="12aacff969ea9cda5b9f3874c40796495311edc92ee29e228269094a16a797ff" Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.979394 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"12aacff969ea9cda5b9f3874c40796495311edc92ee29e228269094a16a797ff"} err="failed to get container status \"12aacff969ea9cda5b9f3874c40796495311edc92ee29e228269094a16a797ff\": rpc error: code = NotFound desc = could not find container \"12aacff969ea9cda5b9f3874c40796495311edc92ee29e228269094a16a797ff\": container with ID starting with 12aacff969ea9cda5b9f3874c40796495311edc92ee29e228269094a16a797ff not found: ID does not exist" Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.979413 4755 scope.go:117] "RemoveContainer" containerID="a9465e67dc5ac2c0a71160dfc5078323846c8e6ec72d2d04aa4ed06f05875511" Nov 24 01:31:12 crc kubenswrapper[4755]: E1124 01:31:12.984000 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a9465e67dc5ac2c0a71160dfc5078323846c8e6ec72d2d04aa4ed06f05875511\": container with ID starting with a9465e67dc5ac2c0a71160dfc5078323846c8e6ec72d2d04aa4ed06f05875511 not found: ID does not exist" containerID="a9465e67dc5ac2c0a71160dfc5078323846c8e6ec72d2d04aa4ed06f05875511" Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.984050 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a9465e67dc5ac2c0a71160dfc5078323846c8e6ec72d2d04aa4ed06f05875511"} err="failed to get container status \"a9465e67dc5ac2c0a71160dfc5078323846c8e6ec72d2d04aa4ed06f05875511\": rpc error: code = NotFound desc = could not find container \"a9465e67dc5ac2c0a71160dfc5078323846c8e6ec72d2d04aa4ed06f05875511\": container with ID starting with a9465e67dc5ac2c0a71160dfc5078323846c8e6ec72d2d04aa4ed06f05875511 not found: ID does not exist" Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.984080 4755 scope.go:117] "RemoveContainer" containerID="2c69fe56e8872359f6380f1e23d3e7fc270b5d179950e8bbbc6322cd8955a275" Nov 24 01:31:12 crc kubenswrapper[4755]: E1124 01:31:12.985395 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: 
code = NotFound desc = could not find container \"2c69fe56e8872359f6380f1e23d3e7fc270b5d179950e8bbbc6322cd8955a275\": container with ID starting with 2c69fe56e8872359f6380f1e23d3e7fc270b5d179950e8bbbc6322cd8955a275 not found: ID does not exist" containerID="2c69fe56e8872359f6380f1e23d3e7fc270b5d179950e8bbbc6322cd8955a275" Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.985431 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c69fe56e8872359f6380f1e23d3e7fc270b5d179950e8bbbc6322cd8955a275"} err="failed to get container status \"2c69fe56e8872359f6380f1e23d3e7fc270b5d179950e8bbbc6322cd8955a275\": rpc error: code = NotFound desc = could not find container \"2c69fe56e8872359f6380f1e23d3e7fc270b5d179950e8bbbc6322cd8955a275\": container with ID starting with 2c69fe56e8872359f6380f1e23d3e7fc270b5d179950e8bbbc6322cd8955a275 not found: ID does not exist" Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.985451 4755 scope.go:117] "RemoveContainer" containerID="a72798220c2d6cabfcf4ee702fcde40865f698f9d3dc29a35135078803dc8271" Nov 24 01:31:12 crc kubenswrapper[4755]: E1124 01:31:12.990196 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a72798220c2d6cabfcf4ee702fcde40865f698f9d3dc29a35135078803dc8271\": container with ID starting with a72798220c2d6cabfcf4ee702fcde40865f698f9d3dc29a35135078803dc8271 not found: ID does not exist" containerID="a72798220c2d6cabfcf4ee702fcde40865f698f9d3dc29a35135078803dc8271" Nov 24 01:31:12 crc kubenswrapper[4755]: I1124 01:31:12.990240 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a72798220c2d6cabfcf4ee702fcde40865f698f9d3dc29a35135078803dc8271"} err="failed to get container status \"a72798220c2d6cabfcf4ee702fcde40865f698f9d3dc29a35135078803dc8271\": rpc error: code = NotFound desc = could not find container \"a72798220c2d6cabfcf4ee702fcde40865f698f9d3dc29a35135078803dc8271\": container with ID starting with a72798220c2d6cabfcf4ee702fcde40865f698f9d3dc29a35135078803dc8271 not found: ID does not exist" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.021548 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5de632b3-5130-4cd7-95e5-dbfde5d6738a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5de632b3-5130-4cd7-95e5-dbfde5d6738a" (UID: "5de632b3-5130-4cd7-95e5-dbfde5d6738a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.046237 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5de632b3-5130-4cd7-95e5-dbfde5d6738a-config-data" (OuterVolumeSpecName: "config-data") pod "5de632b3-5130-4cd7-95e5-dbfde5d6738a" (UID: "5de632b3-5130-4cd7-95e5-dbfde5d6738a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.053038 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c80b623c-c6a3-403e-a79f-7e540ac99f4e-dns-svc\") pod \"dnsmasq-dns-59cf4bdb65-hgwb2\" (UID: \"c80b623c-c6a3-403e-a79f-7e540ac99f4e\") " pod="openstack/dnsmasq-dns-59cf4bdb65-hgwb2" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.053129 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c80b623c-c6a3-403e-a79f-7e540ac99f4e-ovsdbserver-sb\") pod \"dnsmasq-dns-59cf4bdb65-hgwb2\" (UID: \"c80b623c-c6a3-403e-a79f-7e540ac99f4e\") " pod="openstack/dnsmasq-dns-59cf4bdb65-hgwb2" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.053571 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c80b623c-c6a3-403e-a79f-7e540ac99f4e-config\") pod \"dnsmasq-dns-59cf4bdb65-hgwb2\" (UID: \"c80b623c-c6a3-403e-a79f-7e540ac99f4e\") " pod="openstack/dnsmasq-dns-59cf4bdb65-hgwb2" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.053690 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s9k4g\" (UniqueName: \"kubernetes.io/projected/c80b623c-c6a3-403e-a79f-7e540ac99f4e-kube-api-access-s9k4g\") pod \"dnsmasq-dns-59cf4bdb65-hgwb2\" (UID: \"c80b623c-c6a3-403e-a79f-7e540ac99f4e\") " pod="openstack/dnsmasq-dns-59cf4bdb65-hgwb2" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.053825 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c80b623c-c6a3-403e-a79f-7e540ac99f4e-dns-swift-storage-0\") pod \"dnsmasq-dns-59cf4bdb65-hgwb2\" (UID: \"c80b623c-c6a3-403e-a79f-7e540ac99f4e\") " pod="openstack/dnsmasq-dns-59cf4bdb65-hgwb2" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.054065 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c80b623c-c6a3-403e-a79f-7e540ac99f4e-ovsdbserver-nb\") pod \"dnsmasq-dns-59cf4bdb65-hgwb2\" (UID: \"c80b623c-c6a3-403e-a79f-7e540ac99f4e\") " pod="openstack/dnsmasq-dns-59cf4bdb65-hgwb2" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.054153 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5de632b3-5130-4cd7-95e5-dbfde5d6738a-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.054170 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5de632b3-5130-4cd7-95e5-dbfde5d6738a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.155267 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s9k4g\" (UniqueName: \"kubernetes.io/projected/c80b623c-c6a3-403e-a79f-7e540ac99f4e-kube-api-access-s9k4g\") pod \"dnsmasq-dns-59cf4bdb65-hgwb2\" (UID: \"c80b623c-c6a3-403e-a79f-7e540ac99f4e\") " pod="openstack/dnsmasq-dns-59cf4bdb65-hgwb2" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.155344 4755 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c80b623c-c6a3-403e-a79f-7e540ac99f4e-dns-swift-storage-0\") pod \"dnsmasq-dns-59cf4bdb65-hgwb2\" (UID: \"c80b623c-c6a3-403e-a79f-7e540ac99f4e\") " pod="openstack/dnsmasq-dns-59cf4bdb65-hgwb2" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.155431 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c80b623c-c6a3-403e-a79f-7e540ac99f4e-ovsdbserver-nb\") pod \"dnsmasq-dns-59cf4bdb65-hgwb2\" (UID: \"c80b623c-c6a3-403e-a79f-7e540ac99f4e\") " pod="openstack/dnsmasq-dns-59cf4bdb65-hgwb2" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.155471 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c80b623c-c6a3-403e-a79f-7e540ac99f4e-dns-svc\") pod \"dnsmasq-dns-59cf4bdb65-hgwb2\" (UID: \"c80b623c-c6a3-403e-a79f-7e540ac99f4e\") " pod="openstack/dnsmasq-dns-59cf4bdb65-hgwb2" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.155498 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c80b623c-c6a3-403e-a79f-7e540ac99f4e-ovsdbserver-sb\") pod \"dnsmasq-dns-59cf4bdb65-hgwb2\" (UID: \"c80b623c-c6a3-403e-a79f-7e540ac99f4e\") " pod="openstack/dnsmasq-dns-59cf4bdb65-hgwb2" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.155559 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c80b623c-c6a3-403e-a79f-7e540ac99f4e-config\") pod \"dnsmasq-dns-59cf4bdb65-hgwb2\" (UID: \"c80b623c-c6a3-403e-a79f-7e540ac99f4e\") " pod="openstack/dnsmasq-dns-59cf4bdb65-hgwb2" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.156300 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c80b623c-c6a3-403e-a79f-7e540ac99f4e-ovsdbserver-nb\") pod \"dnsmasq-dns-59cf4bdb65-hgwb2\" (UID: \"c80b623c-c6a3-403e-a79f-7e540ac99f4e\") " pod="openstack/dnsmasq-dns-59cf4bdb65-hgwb2" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.156506 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c80b623c-c6a3-403e-a79f-7e540ac99f4e-ovsdbserver-sb\") pod \"dnsmasq-dns-59cf4bdb65-hgwb2\" (UID: \"c80b623c-c6a3-403e-a79f-7e540ac99f4e\") " pod="openstack/dnsmasq-dns-59cf4bdb65-hgwb2" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.156644 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c80b623c-c6a3-403e-a79f-7e540ac99f4e-config\") pod \"dnsmasq-dns-59cf4bdb65-hgwb2\" (UID: \"c80b623c-c6a3-403e-a79f-7e540ac99f4e\") " pod="openstack/dnsmasq-dns-59cf4bdb65-hgwb2" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.156904 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c80b623c-c6a3-403e-a79f-7e540ac99f4e-dns-swift-storage-0\") pod \"dnsmasq-dns-59cf4bdb65-hgwb2\" (UID: \"c80b623c-c6a3-403e-a79f-7e540ac99f4e\") " pod="openstack/dnsmasq-dns-59cf4bdb65-hgwb2" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.157131 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c80b623c-c6a3-403e-a79f-7e540ac99f4e-dns-svc\") 
pod \"dnsmasq-dns-59cf4bdb65-hgwb2\" (UID: \"c80b623c-c6a3-403e-a79f-7e540ac99f4e\") " pod="openstack/dnsmasq-dns-59cf4bdb65-hgwb2" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.172178 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s9k4g\" (UniqueName: \"kubernetes.io/projected/c80b623c-c6a3-403e-a79f-7e540ac99f4e-kube-api-access-s9k4g\") pod \"dnsmasq-dns-59cf4bdb65-hgwb2\" (UID: \"c80b623c-c6a3-403e-a79f-7e540ac99f4e\") " pod="openstack/dnsmasq-dns-59cf4bdb65-hgwb2" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.320980 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-hgwb2" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.481280 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.507171 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.525124 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.527314 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.529450 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.529621 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.529714 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.537643 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.567633 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cd53cbcf-1063-422f-a473-b97fff31b260-log-httpd\") pod \"ceilometer-0\" (UID: \"cd53cbcf-1063-422f-a473-b97fff31b260\") " pod="openstack/ceilometer-0" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.567725 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cd53cbcf-1063-422f-a473-b97fff31b260-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cd53cbcf-1063-422f-a473-b97fff31b260\") " pod="openstack/ceilometer-0" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.567801 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cd53cbcf-1063-422f-a473-b97fff31b260-run-httpd\") pod \"ceilometer-0\" (UID: \"cd53cbcf-1063-422f-a473-b97fff31b260\") " pod="openstack/ceilometer-0" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.567821 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jtm6c\" (UniqueName: \"kubernetes.io/projected/cd53cbcf-1063-422f-a473-b97fff31b260-kube-api-access-jtm6c\") pod \"ceilometer-0\" (UID: \"cd53cbcf-1063-422f-a473-b97fff31b260\") " pod="openstack/ceilometer-0" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.567844 4755 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd53cbcf-1063-422f-a473-b97fff31b260-scripts\") pod \"ceilometer-0\" (UID: \"cd53cbcf-1063-422f-a473-b97fff31b260\") " pod="openstack/ceilometer-0" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.567899 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd53cbcf-1063-422f-a473-b97fff31b260-config-data\") pod \"ceilometer-0\" (UID: \"cd53cbcf-1063-422f-a473-b97fff31b260\") " pod="openstack/ceilometer-0" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.567919 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd53cbcf-1063-422f-a473-b97fff31b260-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cd53cbcf-1063-422f-a473-b97fff31b260\") " pod="openstack/ceilometer-0" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.567949 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/cd53cbcf-1063-422f-a473-b97fff31b260-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"cd53cbcf-1063-422f-a473-b97fff31b260\") " pod="openstack/ceilometer-0" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.670104 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cd53cbcf-1063-422f-a473-b97fff31b260-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cd53cbcf-1063-422f-a473-b97fff31b260\") " pod="openstack/ceilometer-0" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.670183 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cd53cbcf-1063-422f-a473-b97fff31b260-run-httpd\") pod \"ceilometer-0\" (UID: \"cd53cbcf-1063-422f-a473-b97fff31b260\") " pod="openstack/ceilometer-0" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.670204 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jtm6c\" (UniqueName: \"kubernetes.io/projected/cd53cbcf-1063-422f-a473-b97fff31b260-kube-api-access-jtm6c\") pod \"ceilometer-0\" (UID: \"cd53cbcf-1063-422f-a473-b97fff31b260\") " pod="openstack/ceilometer-0" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.670244 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd53cbcf-1063-422f-a473-b97fff31b260-scripts\") pod \"ceilometer-0\" (UID: \"cd53cbcf-1063-422f-a473-b97fff31b260\") " pod="openstack/ceilometer-0" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.670289 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd53cbcf-1063-422f-a473-b97fff31b260-config-data\") pod \"ceilometer-0\" (UID: \"cd53cbcf-1063-422f-a473-b97fff31b260\") " pod="openstack/ceilometer-0" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.670305 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd53cbcf-1063-422f-a473-b97fff31b260-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cd53cbcf-1063-422f-a473-b97fff31b260\") " pod="openstack/ceilometer-0" 
Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.670334 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/cd53cbcf-1063-422f-a473-b97fff31b260-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"cd53cbcf-1063-422f-a473-b97fff31b260\") " pod="openstack/ceilometer-0" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.670360 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cd53cbcf-1063-422f-a473-b97fff31b260-log-httpd\") pod \"ceilometer-0\" (UID: \"cd53cbcf-1063-422f-a473-b97fff31b260\") " pod="openstack/ceilometer-0" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.670843 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cd53cbcf-1063-422f-a473-b97fff31b260-log-httpd\") pod \"ceilometer-0\" (UID: \"cd53cbcf-1063-422f-a473-b97fff31b260\") " pod="openstack/ceilometer-0" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.672178 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cd53cbcf-1063-422f-a473-b97fff31b260-run-httpd\") pod \"ceilometer-0\" (UID: \"cd53cbcf-1063-422f-a473-b97fff31b260\") " pod="openstack/ceilometer-0" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.678104 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cd53cbcf-1063-422f-a473-b97fff31b260-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cd53cbcf-1063-422f-a473-b97fff31b260\") " pod="openstack/ceilometer-0" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.678423 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd53cbcf-1063-422f-a473-b97fff31b260-scripts\") pod \"ceilometer-0\" (UID: \"cd53cbcf-1063-422f-a473-b97fff31b260\") " pod="openstack/ceilometer-0" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.678442 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/cd53cbcf-1063-422f-a473-b97fff31b260-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"cd53cbcf-1063-422f-a473-b97fff31b260\") " pod="openstack/ceilometer-0" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.679722 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd53cbcf-1063-422f-a473-b97fff31b260-config-data\") pod \"ceilometer-0\" (UID: \"cd53cbcf-1063-422f-a473-b97fff31b260\") " pod="openstack/ceilometer-0" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.694033 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jtm6c\" (UniqueName: \"kubernetes.io/projected/cd53cbcf-1063-422f-a473-b97fff31b260-kube-api-access-jtm6c\") pod \"ceilometer-0\" (UID: \"cd53cbcf-1063-422f-a473-b97fff31b260\") " pod="openstack/ceilometer-0" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.698453 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd53cbcf-1063-422f-a473-b97fff31b260-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cd53cbcf-1063-422f-a473-b97fff31b260\") " pod="openstack/ceilometer-0" Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.837788 
4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-hgwb2"] Nov 24 01:31:13 crc kubenswrapper[4755]: W1124 01:31:13.845929 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc80b623c_c6a3_403e_a79f_7e540ac99f4e.slice/crio-1cc89a5387b20e3d1b2eda6d2582c1b0e32a3fd869542a9a4219a31628d6744a WatchSource:0}: Error finding container 1cc89a5387b20e3d1b2eda6d2582c1b0e32a3fd869542a9a4219a31628d6744a: Status 404 returned error can't find the container with id 1cc89a5387b20e3d1b2eda6d2582c1b0e32a3fd869542a9a4219a31628d6744a Nov 24 01:31:13 crc kubenswrapper[4755]: I1124 01:31:13.870082 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 24 01:31:14 crc kubenswrapper[4755]: I1124 01:31:14.012320 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5de632b3-5130-4cd7-95e5-dbfde5d6738a" path="/var/lib/kubelet/pods/5de632b3-5130-4cd7-95e5-dbfde5d6738a/volumes" Nov 24 01:31:14 crc kubenswrapper[4755]: I1124 01:31:14.331739 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 24 01:31:14 crc kubenswrapper[4755]: I1124 01:31:14.697552 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 24 01:31:14 crc kubenswrapper[4755]: I1124 01:31:14.760168 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cd53cbcf-1063-422f-a473-b97fff31b260","Type":"ContainerStarted","Data":"4bc384e44d671c5bd1cbdc604eb7e013c7908181c274aad9a2f653ee58f647a3"} Nov 24 01:31:14 crc kubenswrapper[4755]: I1124 01:31:14.762181 4755 generic.go:334] "Generic (PLEG): container finished" podID="c80b623c-c6a3-403e-a79f-7e540ac99f4e" containerID="e209f54246103e16a1afde95ab3a456634b4c3f5898a70d1384708b818400c7e" exitCode=0 Nov 24 01:31:14 crc kubenswrapper[4755]: I1124 01:31:14.762263 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-hgwb2" event={"ID":"c80b623c-c6a3-403e-a79f-7e540ac99f4e","Type":"ContainerDied","Data":"e209f54246103e16a1afde95ab3a456634b4c3f5898a70d1384708b818400c7e"} Nov 24 01:31:14 crc kubenswrapper[4755]: I1124 01:31:14.762297 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-hgwb2" event={"ID":"c80b623c-c6a3-403e-a79f-7e540ac99f4e","Type":"ContainerStarted","Data":"1cc89a5387b20e3d1b2eda6d2582c1b0e32a3fd869542a9a4219a31628d6744a"} Nov 24 01:31:15 crc kubenswrapper[4755]: I1124 01:31:15.323976 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 24 01:31:15 crc kubenswrapper[4755]: I1124 01:31:15.415923 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Nov 24 01:31:15 crc kubenswrapper[4755]: I1124 01:31:15.773780 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cd53cbcf-1063-422f-a473-b97fff31b260","Type":"ContainerStarted","Data":"f5426c3a0db25be9cb06bf0eb32faa1b944d2aaf6c5d66100ce614ac9d02389a"} Nov 24 01:31:15 crc kubenswrapper[4755]: I1124 01:31:15.776279 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-hgwb2" event={"ID":"c80b623c-c6a3-403e-a79f-7e540ac99f4e","Type":"ContainerStarted","Data":"855fdf3c7b782ce740e3f48c88ff781d164fe0c532f1d2da2ad54f625bd84fbe"} Nov 24 01:31:15 crc kubenswrapper[4755]: I1124 01:31:15.776391 4755 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="d35e97cf-abdb-42f5-96b9-24f7486ff607" containerName="nova-api-log" containerID="cri-o://dabbe714b8ac62b23fb17055bf96606a6cfe4a9424beadcc7ea865e32143bdb5" gracePeriod=30 Nov 24 01:31:15 crc kubenswrapper[4755]: I1124 01:31:15.776451 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="d35e97cf-abdb-42f5-96b9-24f7486ff607" containerName="nova-api-api" containerID="cri-o://a564d947ea0b8b1bc6aa028e37fa31687e61042c40f8bc202bdba7497c45f340" gracePeriod=30 Nov 24 01:31:15 crc kubenswrapper[4755]: I1124 01:31:15.797387 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-59cf4bdb65-hgwb2" podStartSLOduration=3.797370797 podStartE2EDuration="3.797370797s" podCreationTimestamp="2025-11-24 01:31:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:31:15.793810448 +0000 UTC m=+1100.479875969" watchObservedRunningTime="2025-11-24 01:31:15.797370797 +0000 UTC m=+1100.483436288" Nov 24 01:31:16 crc kubenswrapper[4755]: I1124 01:31:16.785318 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cd53cbcf-1063-422f-a473-b97fff31b260","Type":"ContainerStarted","Data":"343a538df85b38310a1480db695d7a86df4622fc9921d0c16eaa6a0fd0ffa0e6"} Nov 24 01:31:16 crc kubenswrapper[4755]: I1124 01:31:16.785570 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cd53cbcf-1063-422f-a473-b97fff31b260","Type":"ContainerStarted","Data":"25a647325b5b029340ccde9e0d0b6dad7afb12808db0917d0f59bacc180be88d"} Nov 24 01:31:16 crc kubenswrapper[4755]: I1124 01:31:16.786789 4755 generic.go:334] "Generic (PLEG): container finished" podID="d35e97cf-abdb-42f5-96b9-24f7486ff607" containerID="dabbe714b8ac62b23fb17055bf96606a6cfe4a9424beadcc7ea865e32143bdb5" exitCode=143 Nov 24 01:31:16 crc kubenswrapper[4755]: I1124 01:31:16.786852 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d35e97cf-abdb-42f5-96b9-24f7486ff607","Type":"ContainerDied","Data":"dabbe714b8ac62b23fb17055bf96606a6cfe4a9424beadcc7ea865e32143bdb5"} Nov 24 01:31:16 crc kubenswrapper[4755]: I1124 01:31:16.787031 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-59cf4bdb65-hgwb2" Nov 24 01:31:17 crc kubenswrapper[4755]: I1124 01:31:17.799490 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cd53cbcf-1063-422f-a473-b97fff31b260" containerName="ceilometer-central-agent" containerID="cri-o://f5426c3a0db25be9cb06bf0eb32faa1b944d2aaf6c5d66100ce614ac9d02389a" gracePeriod=30 Nov 24 01:31:17 crc kubenswrapper[4755]: I1124 01:31:17.800064 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cd53cbcf-1063-422f-a473-b97fff31b260","Type":"ContainerStarted","Data":"da83f87d6d1491e1c07fdd2e4b7ee45a655f446cfe00a49d772c5b3b1a306c59"} Nov 24 01:31:17 crc kubenswrapper[4755]: I1124 01:31:17.800111 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 24 01:31:17 crc kubenswrapper[4755]: I1124 01:31:17.800413 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cd53cbcf-1063-422f-a473-b97fff31b260" 
containerName="proxy-httpd" containerID="cri-o://da83f87d6d1491e1c07fdd2e4b7ee45a655f446cfe00a49d772c5b3b1a306c59" gracePeriod=30 Nov 24 01:31:17 crc kubenswrapper[4755]: I1124 01:31:17.800475 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cd53cbcf-1063-422f-a473-b97fff31b260" containerName="sg-core" containerID="cri-o://343a538df85b38310a1480db695d7a86df4622fc9921d0c16eaa6a0fd0ffa0e6" gracePeriod=30 Nov 24 01:31:17 crc kubenswrapper[4755]: I1124 01:31:17.800523 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cd53cbcf-1063-422f-a473-b97fff31b260" containerName="ceilometer-notification-agent" containerID="cri-o://25a647325b5b029340ccde9e0d0b6dad7afb12808db0917d0f59bacc180be88d" gracePeriod=30 Nov 24 01:31:17 crc kubenswrapper[4755]: I1124 01:31:17.830963 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.713027729 podStartE2EDuration="4.830943106s" podCreationTimestamp="2025-11-24 01:31:13 +0000 UTC" firstStartedPulling="2025-11-24 01:31:14.360395859 +0000 UTC m=+1099.046461360" lastFinishedPulling="2025-11-24 01:31:17.478311226 +0000 UTC m=+1102.164376737" observedRunningTime="2025-11-24 01:31:17.819405838 +0000 UTC m=+1102.505471349" watchObservedRunningTime="2025-11-24 01:31:17.830943106 +0000 UTC m=+1102.517008607" Nov 24 01:31:18 crc kubenswrapper[4755]: I1124 01:31:18.079242 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Nov 24 01:31:18 crc kubenswrapper[4755]: I1124 01:31:18.810138 4755 generic.go:334] "Generic (PLEG): container finished" podID="cd53cbcf-1063-422f-a473-b97fff31b260" containerID="343a538df85b38310a1480db695d7a86df4622fc9921d0c16eaa6a0fd0ffa0e6" exitCode=2 Nov 24 01:31:18 crc kubenswrapper[4755]: I1124 01:31:18.810422 4755 generic.go:334] "Generic (PLEG): container finished" podID="cd53cbcf-1063-422f-a473-b97fff31b260" containerID="25a647325b5b029340ccde9e0d0b6dad7afb12808db0917d0f59bacc180be88d" exitCode=0 Nov 24 01:31:18 crc kubenswrapper[4755]: I1124 01:31:18.810179 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cd53cbcf-1063-422f-a473-b97fff31b260","Type":"ContainerDied","Data":"343a538df85b38310a1480db695d7a86df4622fc9921d0c16eaa6a0fd0ffa0e6"} Nov 24 01:31:18 crc kubenswrapper[4755]: I1124 01:31:18.810461 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cd53cbcf-1063-422f-a473-b97fff31b260","Type":"ContainerDied","Data":"25a647325b5b029340ccde9e0d0b6dad7afb12808db0917d0f59bacc180be88d"} Nov 24 01:31:19 crc kubenswrapper[4755]: I1124 01:31:19.375031 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 24 01:31:19 crc kubenswrapper[4755]: I1124 01:31:19.471931 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d35e97cf-abdb-42f5-96b9-24f7486ff607-logs\") pod \"d35e97cf-abdb-42f5-96b9-24f7486ff607\" (UID: \"d35e97cf-abdb-42f5-96b9-24f7486ff607\") " Nov 24 01:31:19 crc kubenswrapper[4755]: I1124 01:31:19.471975 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d35e97cf-abdb-42f5-96b9-24f7486ff607-combined-ca-bundle\") pod \"d35e97cf-abdb-42f5-96b9-24f7486ff607\" (UID: \"d35e97cf-abdb-42f5-96b9-24f7486ff607\") " Nov 24 01:31:19 crc kubenswrapper[4755]: I1124 01:31:19.472026 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-72xx6\" (UniqueName: \"kubernetes.io/projected/d35e97cf-abdb-42f5-96b9-24f7486ff607-kube-api-access-72xx6\") pod \"d35e97cf-abdb-42f5-96b9-24f7486ff607\" (UID: \"d35e97cf-abdb-42f5-96b9-24f7486ff607\") " Nov 24 01:31:19 crc kubenswrapper[4755]: I1124 01:31:19.472159 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d35e97cf-abdb-42f5-96b9-24f7486ff607-config-data\") pod \"d35e97cf-abdb-42f5-96b9-24f7486ff607\" (UID: \"d35e97cf-abdb-42f5-96b9-24f7486ff607\") " Nov 24 01:31:19 crc kubenswrapper[4755]: I1124 01:31:19.472478 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d35e97cf-abdb-42f5-96b9-24f7486ff607-logs" (OuterVolumeSpecName: "logs") pod "d35e97cf-abdb-42f5-96b9-24f7486ff607" (UID: "d35e97cf-abdb-42f5-96b9-24f7486ff607"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:31:19 crc kubenswrapper[4755]: I1124 01:31:19.472843 4755 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d35e97cf-abdb-42f5-96b9-24f7486ff607-logs\") on node \"crc\" DevicePath \"\"" Nov 24 01:31:19 crc kubenswrapper[4755]: I1124 01:31:19.478375 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d35e97cf-abdb-42f5-96b9-24f7486ff607-kube-api-access-72xx6" (OuterVolumeSpecName: "kube-api-access-72xx6") pod "d35e97cf-abdb-42f5-96b9-24f7486ff607" (UID: "d35e97cf-abdb-42f5-96b9-24f7486ff607"). InnerVolumeSpecName "kube-api-access-72xx6". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:31:19 crc kubenswrapper[4755]: I1124 01:31:19.500738 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d35e97cf-abdb-42f5-96b9-24f7486ff607-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d35e97cf-abdb-42f5-96b9-24f7486ff607" (UID: "d35e97cf-abdb-42f5-96b9-24f7486ff607"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:31:19 crc kubenswrapper[4755]: I1124 01:31:19.502984 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d35e97cf-abdb-42f5-96b9-24f7486ff607-config-data" (OuterVolumeSpecName: "config-data") pod "d35e97cf-abdb-42f5-96b9-24f7486ff607" (UID: "d35e97cf-abdb-42f5-96b9-24f7486ff607"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:31:19 crc kubenswrapper[4755]: I1124 01:31:19.574197 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-72xx6\" (UniqueName: \"kubernetes.io/projected/d35e97cf-abdb-42f5-96b9-24f7486ff607-kube-api-access-72xx6\") on node \"crc\" DevicePath \"\"" Nov 24 01:31:19 crc kubenswrapper[4755]: I1124 01:31:19.574233 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d35e97cf-abdb-42f5-96b9-24f7486ff607-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 01:31:19 crc kubenswrapper[4755]: I1124 01:31:19.574247 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d35e97cf-abdb-42f5-96b9-24f7486ff607-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:31:19 crc kubenswrapper[4755]: I1124 01:31:19.825698 4755 generic.go:334] "Generic (PLEG): container finished" podID="d35e97cf-abdb-42f5-96b9-24f7486ff607" containerID="a564d947ea0b8b1bc6aa028e37fa31687e61042c40f8bc202bdba7497c45f340" exitCode=0 Nov 24 01:31:19 crc kubenswrapper[4755]: I1124 01:31:19.825750 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d35e97cf-abdb-42f5-96b9-24f7486ff607","Type":"ContainerDied","Data":"a564d947ea0b8b1bc6aa028e37fa31687e61042c40f8bc202bdba7497c45f340"} Nov 24 01:31:19 crc kubenswrapper[4755]: I1124 01:31:19.825808 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d35e97cf-abdb-42f5-96b9-24f7486ff607","Type":"ContainerDied","Data":"5752f1b6f39cda7f1b2995e3cca71d31e54ea94e9383534ac89b0b1b39d8359d"} Nov 24 01:31:19 crc kubenswrapper[4755]: I1124 01:31:19.825830 4755 scope.go:117] "RemoveContainer" containerID="a564d947ea0b8b1bc6aa028e37fa31687e61042c40f8bc202bdba7497c45f340" Nov 24 01:31:19 crc kubenswrapper[4755]: I1124 01:31:19.825995 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 24 01:31:19 crc kubenswrapper[4755]: I1124 01:31:19.864761 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 24 01:31:19 crc kubenswrapper[4755]: I1124 01:31:19.867946 4755 scope.go:117] "RemoveContainer" containerID="dabbe714b8ac62b23fb17055bf96606a6cfe4a9424beadcc7ea865e32143bdb5" Nov 24 01:31:19 crc kubenswrapper[4755]: I1124 01:31:19.873059 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Nov 24 01:31:19 crc kubenswrapper[4755]: I1124 01:31:19.890744 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Nov 24 01:31:19 crc kubenswrapper[4755]: E1124 01:31:19.891210 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d35e97cf-abdb-42f5-96b9-24f7486ff607" containerName="nova-api-api" Nov 24 01:31:19 crc kubenswrapper[4755]: I1124 01:31:19.891226 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="d35e97cf-abdb-42f5-96b9-24f7486ff607" containerName="nova-api-api" Nov 24 01:31:19 crc kubenswrapper[4755]: E1124 01:31:19.891239 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d35e97cf-abdb-42f5-96b9-24f7486ff607" containerName="nova-api-log" Nov 24 01:31:19 crc kubenswrapper[4755]: I1124 01:31:19.891247 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="d35e97cf-abdb-42f5-96b9-24f7486ff607" containerName="nova-api-log" Nov 24 01:31:19 crc kubenswrapper[4755]: I1124 01:31:19.891432 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="d35e97cf-abdb-42f5-96b9-24f7486ff607" containerName="nova-api-log" Nov 24 01:31:19 crc kubenswrapper[4755]: I1124 01:31:19.891461 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="d35e97cf-abdb-42f5-96b9-24f7486ff607" containerName="nova-api-api" Nov 24 01:31:19 crc kubenswrapper[4755]: I1124 01:31:19.892372 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 24 01:31:19 crc kubenswrapper[4755]: I1124 01:31:19.894150 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Nov 24 01:31:19 crc kubenswrapper[4755]: I1124 01:31:19.894911 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Nov 24 01:31:19 crc kubenswrapper[4755]: I1124 01:31:19.895754 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Nov 24 01:31:19 crc kubenswrapper[4755]: I1124 01:31:19.902983 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 24 01:31:19 crc kubenswrapper[4755]: I1124 01:31:19.915161 4755 scope.go:117] "RemoveContainer" containerID="a564d947ea0b8b1bc6aa028e37fa31687e61042c40f8bc202bdba7497c45f340" Nov 24 01:31:19 crc kubenswrapper[4755]: E1124 01:31:19.915827 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a564d947ea0b8b1bc6aa028e37fa31687e61042c40f8bc202bdba7497c45f340\": container with ID starting with a564d947ea0b8b1bc6aa028e37fa31687e61042c40f8bc202bdba7497c45f340 not found: ID does not exist" containerID="a564d947ea0b8b1bc6aa028e37fa31687e61042c40f8bc202bdba7497c45f340" Nov 24 01:31:19 crc kubenswrapper[4755]: I1124 01:31:19.915856 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a564d947ea0b8b1bc6aa028e37fa31687e61042c40f8bc202bdba7497c45f340"} err="failed to get container status \"a564d947ea0b8b1bc6aa028e37fa31687e61042c40f8bc202bdba7497c45f340\": rpc error: code = NotFound desc = could not find container \"a564d947ea0b8b1bc6aa028e37fa31687e61042c40f8bc202bdba7497c45f340\": container with ID starting with a564d947ea0b8b1bc6aa028e37fa31687e61042c40f8bc202bdba7497c45f340 not found: ID does not exist" Nov 24 01:31:19 crc kubenswrapper[4755]: I1124 01:31:19.915874 4755 scope.go:117] "RemoveContainer" containerID="dabbe714b8ac62b23fb17055bf96606a6cfe4a9424beadcc7ea865e32143bdb5" Nov 24 01:31:19 crc kubenswrapper[4755]: E1124 01:31:19.916172 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dabbe714b8ac62b23fb17055bf96606a6cfe4a9424beadcc7ea865e32143bdb5\": container with ID starting with dabbe714b8ac62b23fb17055bf96606a6cfe4a9424beadcc7ea865e32143bdb5 not found: ID does not exist" containerID="dabbe714b8ac62b23fb17055bf96606a6cfe4a9424beadcc7ea865e32143bdb5" Nov 24 01:31:19 crc kubenswrapper[4755]: I1124 01:31:19.916193 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dabbe714b8ac62b23fb17055bf96606a6cfe4a9424beadcc7ea865e32143bdb5"} err="failed to get container status \"dabbe714b8ac62b23fb17055bf96606a6cfe4a9424beadcc7ea865e32143bdb5\": rpc error: code = NotFound desc = could not find container \"dabbe714b8ac62b23fb17055bf96606a6cfe4a9424beadcc7ea865e32143bdb5\": container with ID starting with dabbe714b8ac62b23fb17055bf96606a6cfe4a9424beadcc7ea865e32143bdb5 not found: ID does not exist" Nov 24 01:31:19 crc kubenswrapper[4755]: I1124 01:31:19.981985 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e9efebb6-5d59-415e-9c61-0ea2d467d266-logs\") pod \"nova-api-0\" (UID: \"e9efebb6-5d59-415e-9c61-0ea2d467d266\") " pod="openstack/nova-api-0" Nov 24 01:31:19 crc 
kubenswrapper[4755]: I1124 01:31:19.982096 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9efebb6-5d59-415e-9c61-0ea2d467d266-public-tls-certs\") pod \"nova-api-0\" (UID: \"e9efebb6-5d59-415e-9c61-0ea2d467d266\") " pod="openstack/nova-api-0" Nov 24 01:31:19 crc kubenswrapper[4755]: I1124 01:31:19.982141 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9efebb6-5d59-415e-9c61-0ea2d467d266-config-data\") pod \"nova-api-0\" (UID: \"e9efebb6-5d59-415e-9c61-0ea2d467d266\") " pod="openstack/nova-api-0" Nov 24 01:31:19 crc kubenswrapper[4755]: I1124 01:31:19.982213 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6jccz\" (UniqueName: \"kubernetes.io/projected/e9efebb6-5d59-415e-9c61-0ea2d467d266-kube-api-access-6jccz\") pod \"nova-api-0\" (UID: \"e9efebb6-5d59-415e-9c61-0ea2d467d266\") " pod="openstack/nova-api-0" Nov 24 01:31:19 crc kubenswrapper[4755]: I1124 01:31:19.982256 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9efebb6-5d59-415e-9c61-0ea2d467d266-internal-tls-certs\") pod \"nova-api-0\" (UID: \"e9efebb6-5d59-415e-9c61-0ea2d467d266\") " pod="openstack/nova-api-0" Nov 24 01:31:19 crc kubenswrapper[4755]: I1124 01:31:19.982288 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9efebb6-5d59-415e-9c61-0ea2d467d266-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e9efebb6-5d59-415e-9c61-0ea2d467d266\") " pod="openstack/nova-api-0" Nov 24 01:31:20 crc kubenswrapper[4755]: I1124 01:31:20.006446 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d35e97cf-abdb-42f5-96b9-24f7486ff607" path="/var/lib/kubelet/pods/d35e97cf-abdb-42f5-96b9-24f7486ff607/volumes" Nov 24 01:31:20 crc kubenswrapper[4755]: I1124 01:31:20.084170 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9efebb6-5d59-415e-9c61-0ea2d467d266-config-data\") pod \"nova-api-0\" (UID: \"e9efebb6-5d59-415e-9c61-0ea2d467d266\") " pod="openstack/nova-api-0" Nov 24 01:31:20 crc kubenswrapper[4755]: I1124 01:31:20.084588 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6jccz\" (UniqueName: \"kubernetes.io/projected/e9efebb6-5d59-415e-9c61-0ea2d467d266-kube-api-access-6jccz\") pod \"nova-api-0\" (UID: \"e9efebb6-5d59-415e-9c61-0ea2d467d266\") " pod="openstack/nova-api-0" Nov 24 01:31:20 crc kubenswrapper[4755]: I1124 01:31:20.084721 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9efebb6-5d59-415e-9c61-0ea2d467d266-internal-tls-certs\") pod \"nova-api-0\" (UID: \"e9efebb6-5d59-415e-9c61-0ea2d467d266\") " pod="openstack/nova-api-0" Nov 24 01:31:20 crc kubenswrapper[4755]: I1124 01:31:20.084834 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9efebb6-5d59-415e-9c61-0ea2d467d266-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e9efebb6-5d59-415e-9c61-0ea2d467d266\") " pod="openstack/nova-api-0" Nov 24 01:31:20 crc 
kubenswrapper[4755]: I1124 01:31:20.085109 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e9efebb6-5d59-415e-9c61-0ea2d467d266-logs\") pod \"nova-api-0\" (UID: \"e9efebb6-5d59-415e-9c61-0ea2d467d266\") " pod="openstack/nova-api-0" Nov 24 01:31:20 crc kubenswrapper[4755]: I1124 01:31:20.085337 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9efebb6-5d59-415e-9c61-0ea2d467d266-public-tls-certs\") pod \"nova-api-0\" (UID: \"e9efebb6-5d59-415e-9c61-0ea2d467d266\") " pod="openstack/nova-api-0" Nov 24 01:31:20 crc kubenswrapper[4755]: I1124 01:31:20.087093 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e9efebb6-5d59-415e-9c61-0ea2d467d266-logs\") pod \"nova-api-0\" (UID: \"e9efebb6-5d59-415e-9c61-0ea2d467d266\") " pod="openstack/nova-api-0" Nov 24 01:31:20 crc kubenswrapper[4755]: I1124 01:31:20.092228 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9efebb6-5d59-415e-9c61-0ea2d467d266-public-tls-certs\") pod \"nova-api-0\" (UID: \"e9efebb6-5d59-415e-9c61-0ea2d467d266\") " pod="openstack/nova-api-0" Nov 24 01:31:20 crc kubenswrapper[4755]: I1124 01:31:20.092239 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9efebb6-5d59-415e-9c61-0ea2d467d266-config-data\") pod \"nova-api-0\" (UID: \"e9efebb6-5d59-415e-9c61-0ea2d467d266\") " pod="openstack/nova-api-0" Nov 24 01:31:20 crc kubenswrapper[4755]: I1124 01:31:20.092344 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9efebb6-5d59-415e-9c61-0ea2d467d266-internal-tls-certs\") pod \"nova-api-0\" (UID: \"e9efebb6-5d59-415e-9c61-0ea2d467d266\") " pod="openstack/nova-api-0" Nov 24 01:31:20 crc kubenswrapper[4755]: I1124 01:31:20.092443 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9efebb6-5d59-415e-9c61-0ea2d467d266-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e9efebb6-5d59-415e-9c61-0ea2d467d266\") " pod="openstack/nova-api-0" Nov 24 01:31:20 crc kubenswrapper[4755]: I1124 01:31:20.109503 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6jccz\" (UniqueName: \"kubernetes.io/projected/e9efebb6-5d59-415e-9c61-0ea2d467d266-kube-api-access-6jccz\") pod \"nova-api-0\" (UID: \"e9efebb6-5d59-415e-9c61-0ea2d467d266\") " pod="openstack/nova-api-0" Nov 24 01:31:20 crc kubenswrapper[4755]: I1124 01:31:20.222685 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 24 01:31:20 crc kubenswrapper[4755]: I1124 01:31:20.415937 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Nov 24 01:31:20 crc kubenswrapper[4755]: I1124 01:31:20.442734 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Nov 24 01:31:20 crc kubenswrapper[4755]: I1124 01:31:20.736962 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 24 01:31:20 crc kubenswrapper[4755]: W1124 01:31:20.745905 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode9efebb6_5d59_415e_9c61_0ea2d467d266.slice/crio-457c99bdd490da0e4f5e7d1c692123f82071d94b4aa93eaf7950295f477d71e7 WatchSource:0}: Error finding container 457c99bdd490da0e4f5e7d1c692123f82071d94b4aa93eaf7950295f477d71e7: Status 404 returned error can't find the container with id 457c99bdd490da0e4f5e7d1c692123f82071d94b4aa93eaf7950295f477d71e7 Nov 24 01:31:20 crc kubenswrapper[4755]: I1124 01:31:20.840017 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e9efebb6-5d59-415e-9c61-0ea2d467d266","Type":"ContainerStarted","Data":"457c99bdd490da0e4f5e7d1c692123f82071d94b4aa93eaf7950295f477d71e7"} Nov 24 01:31:20 crc kubenswrapper[4755]: I1124 01:31:20.844600 4755 generic.go:334] "Generic (PLEG): container finished" podID="cd53cbcf-1063-422f-a473-b97fff31b260" containerID="f5426c3a0db25be9cb06bf0eb32faa1b944d2aaf6c5d66100ce614ac9d02389a" exitCode=0 Nov 24 01:31:20 crc kubenswrapper[4755]: I1124 01:31:20.845852 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cd53cbcf-1063-422f-a473-b97fff31b260","Type":"ContainerDied","Data":"f5426c3a0db25be9cb06bf0eb32faa1b944d2aaf6c5d66100ce614ac9d02389a"} Nov 24 01:31:20 crc kubenswrapper[4755]: I1124 01:31:20.863327 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Nov 24 01:31:21 crc kubenswrapper[4755]: I1124 01:31:21.019228 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-b2bn7"] Nov 24 01:31:21 crc kubenswrapper[4755]: I1124 01:31:21.020687 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-b2bn7" Nov 24 01:31:21 crc kubenswrapper[4755]: I1124 01:31:21.022973 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Nov 24 01:31:21 crc kubenswrapper[4755]: I1124 01:31:21.023701 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Nov 24 01:31:21 crc kubenswrapper[4755]: I1124 01:31:21.038140 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-b2bn7"] Nov 24 01:31:21 crc kubenswrapper[4755]: I1124 01:31:21.412743 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f-scripts\") pod \"nova-cell1-cell-mapping-b2bn7\" (UID: \"ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f\") " pod="openstack/nova-cell1-cell-mapping-b2bn7" Nov 24 01:31:21 crc kubenswrapper[4755]: I1124 01:31:21.412929 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-phr8r\" (UniqueName: \"kubernetes.io/projected/ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f-kube-api-access-phr8r\") pod \"nova-cell1-cell-mapping-b2bn7\" (UID: \"ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f\") " pod="openstack/nova-cell1-cell-mapping-b2bn7" Nov 24 01:31:21 crc kubenswrapper[4755]: I1124 01:31:21.413131 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-b2bn7\" (UID: \"ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f\") " pod="openstack/nova-cell1-cell-mapping-b2bn7" Nov 24 01:31:21 crc kubenswrapper[4755]: I1124 01:31:21.414336 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f-config-data\") pod \"nova-cell1-cell-mapping-b2bn7\" (UID: \"ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f\") " pod="openstack/nova-cell1-cell-mapping-b2bn7" Nov 24 01:31:21 crc kubenswrapper[4755]: I1124 01:31:21.516135 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-phr8r\" (UniqueName: \"kubernetes.io/projected/ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f-kube-api-access-phr8r\") pod \"nova-cell1-cell-mapping-b2bn7\" (UID: \"ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f\") " pod="openstack/nova-cell1-cell-mapping-b2bn7" Nov 24 01:31:21 crc kubenswrapper[4755]: I1124 01:31:21.516307 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-b2bn7\" (UID: \"ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f\") " pod="openstack/nova-cell1-cell-mapping-b2bn7" Nov 24 01:31:21 crc kubenswrapper[4755]: I1124 01:31:21.516354 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f-config-data\") pod \"nova-cell1-cell-mapping-b2bn7\" (UID: \"ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f\") " pod="openstack/nova-cell1-cell-mapping-b2bn7" Nov 24 01:31:21 crc kubenswrapper[4755]: I1124 01:31:21.516451 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f-scripts\") pod \"nova-cell1-cell-mapping-b2bn7\" (UID: \"ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f\") " pod="openstack/nova-cell1-cell-mapping-b2bn7" Nov 24 01:31:21 crc kubenswrapper[4755]: I1124 01:31:21.520357 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f-scripts\") pod \"nova-cell1-cell-mapping-b2bn7\" (UID: \"ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f\") " pod="openstack/nova-cell1-cell-mapping-b2bn7" Nov 24 01:31:21 crc kubenswrapper[4755]: I1124 01:31:21.520696 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-b2bn7\" (UID: \"ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f\") " pod="openstack/nova-cell1-cell-mapping-b2bn7" Nov 24 01:31:21 crc kubenswrapper[4755]: I1124 01:31:21.532383 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-phr8r\" (UniqueName: \"kubernetes.io/projected/ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f-kube-api-access-phr8r\") pod \"nova-cell1-cell-mapping-b2bn7\" (UID: \"ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f\") " pod="openstack/nova-cell1-cell-mapping-b2bn7" Nov 24 01:31:21 crc kubenswrapper[4755]: I1124 01:31:21.540017 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f-config-data\") pod \"nova-cell1-cell-mapping-b2bn7\" (UID: \"ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f\") " pod="openstack/nova-cell1-cell-mapping-b2bn7" Nov 24 01:31:21 crc kubenswrapper[4755]: I1124 01:31:21.648509 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-b2bn7" Nov 24 01:31:21 crc kubenswrapper[4755]: I1124 01:31:21.853081 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e9efebb6-5d59-415e-9c61-0ea2d467d266","Type":"ContainerStarted","Data":"2dd6a363c60e1873c8a4083da16fd60a364f38db7ec9552c6994c51cc4f732b5"} Nov 24 01:31:21 crc kubenswrapper[4755]: I1124 01:31:21.853123 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e9efebb6-5d59-415e-9c61-0ea2d467d266","Type":"ContainerStarted","Data":"c924fdc3198eff4e3f2e26fb5e5672422e330473c7229b9971acaa117cf62297"} Nov 24 01:31:22 crc kubenswrapper[4755]: I1124 01:31:22.074268 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.074247986 podStartE2EDuration="3.074247986s" podCreationTimestamp="2025-11-24 01:31:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:31:21.88354492 +0000 UTC m=+1106.569610421" watchObservedRunningTime="2025-11-24 01:31:22.074247986 +0000 UTC m=+1106.760313497" Nov 24 01:31:22 crc kubenswrapper[4755]: I1124 01:31:22.079728 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-b2bn7"] Nov 24 01:31:22 crc kubenswrapper[4755]: W1124 01:31:22.080508 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podee0ab240_2dbe_4d54_9b69_af5e94e2fd0f.slice/crio-6dfcd3ce982b519d6b1e6bbe3be3f1f9d852583c89e93ba49df552c01fd41991 WatchSource:0}: Error finding container 6dfcd3ce982b519d6b1e6bbe3be3f1f9d852583c89e93ba49df552c01fd41991: Status 404 returned error can't find the container with id 6dfcd3ce982b519d6b1e6bbe3be3f1f9d852583c89e93ba49df552c01fd41991 Nov 24 01:31:22 crc kubenswrapper[4755]: I1124 01:31:22.863260 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-b2bn7" event={"ID":"ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f","Type":"ContainerStarted","Data":"45c9c9f2107ede10071edaad7cccd1068d8f7d0c8c7642ec9036461f0bab4da8"} Nov 24 01:31:22 crc kubenswrapper[4755]: I1124 01:31:22.863314 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-b2bn7" event={"ID":"ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f","Type":"ContainerStarted","Data":"6dfcd3ce982b519d6b1e6bbe3be3f1f9d852583c89e93ba49df552c01fd41991"} Nov 24 01:31:22 crc kubenswrapper[4755]: I1124 01:31:22.890424 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-b2bn7" podStartSLOduration=1.89040007 podStartE2EDuration="1.89040007s" podCreationTimestamp="2025-11-24 01:31:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:31:22.885842652 +0000 UTC m=+1107.571908173" watchObservedRunningTime="2025-11-24 01:31:22.89040007 +0000 UTC m=+1107.576465581" Nov 24 01:31:23 crc kubenswrapper[4755]: I1124 01:31:23.323258 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-59cf4bdb65-hgwb2" Nov 24 01:31:23 crc kubenswrapper[4755]: I1124 01:31:23.389396 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-f9x2z"] Nov 24 01:31:23 crc kubenswrapper[4755]: I1124 01:31:23.389638 4755 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-845d6d6f59-f9x2z" podUID="80d1eac3-fd1d-43f2-ad80-91b0910244a1" containerName="dnsmasq-dns" containerID="cri-o://a098cf926adcc72d49de933224c9783012efd690f6c066af693c16e3e115a5bd" gracePeriod=10 Nov 24 01:31:23 crc kubenswrapper[4755]: E1124 01:31:23.715294 4755 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod80d1eac3_fd1d_43f2_ad80_91b0910244a1.slice/crio-conmon-a098cf926adcc72d49de933224c9783012efd690f6c066af693c16e3e115a5bd.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod80d1eac3_fd1d_43f2_ad80_91b0910244a1.slice/crio-a098cf926adcc72d49de933224c9783012efd690f6c066af693c16e3e115a5bd.scope\": RecentStats: unable to find data in memory cache]" Nov 24 01:31:23 crc kubenswrapper[4755]: I1124 01:31:23.879768 4755 generic.go:334] "Generic (PLEG): container finished" podID="80d1eac3-fd1d-43f2-ad80-91b0910244a1" containerID="a098cf926adcc72d49de933224c9783012efd690f6c066af693c16e3e115a5bd" exitCode=0 Nov 24 01:31:23 crc kubenswrapper[4755]: I1124 01:31:23.879842 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-f9x2z" event={"ID":"80d1eac3-fd1d-43f2-ad80-91b0910244a1","Type":"ContainerDied","Data":"a098cf926adcc72d49de933224c9783012efd690f6c066af693c16e3e115a5bd"} Nov 24 01:31:24 crc kubenswrapper[4755]: I1124 01:31:24.340864 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-f9x2z" Nov 24 01:31:24 crc kubenswrapper[4755]: I1124 01:31:24.486056 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/80d1eac3-fd1d-43f2-ad80-91b0910244a1-dns-swift-storage-0\") pod \"80d1eac3-fd1d-43f2-ad80-91b0910244a1\" (UID: \"80d1eac3-fd1d-43f2-ad80-91b0910244a1\") " Nov 24 01:31:24 crc kubenswrapper[4755]: I1124 01:31:24.486158 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-npjnb\" (UniqueName: \"kubernetes.io/projected/80d1eac3-fd1d-43f2-ad80-91b0910244a1-kube-api-access-npjnb\") pod \"80d1eac3-fd1d-43f2-ad80-91b0910244a1\" (UID: \"80d1eac3-fd1d-43f2-ad80-91b0910244a1\") " Nov 24 01:31:24 crc kubenswrapper[4755]: I1124 01:31:24.486206 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/80d1eac3-fd1d-43f2-ad80-91b0910244a1-dns-svc\") pod \"80d1eac3-fd1d-43f2-ad80-91b0910244a1\" (UID: \"80d1eac3-fd1d-43f2-ad80-91b0910244a1\") " Nov 24 01:31:24 crc kubenswrapper[4755]: I1124 01:31:24.486240 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/80d1eac3-fd1d-43f2-ad80-91b0910244a1-ovsdbserver-sb\") pod \"80d1eac3-fd1d-43f2-ad80-91b0910244a1\" (UID: \"80d1eac3-fd1d-43f2-ad80-91b0910244a1\") " Nov 24 01:31:24 crc kubenswrapper[4755]: I1124 01:31:24.486398 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/80d1eac3-fd1d-43f2-ad80-91b0910244a1-config\") pod \"80d1eac3-fd1d-43f2-ad80-91b0910244a1\" (UID: \"80d1eac3-fd1d-43f2-ad80-91b0910244a1\") " Nov 24 01:31:24 crc kubenswrapper[4755]: I1124 01:31:24.486447 4755 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/80d1eac3-fd1d-43f2-ad80-91b0910244a1-ovsdbserver-nb\") pod \"80d1eac3-fd1d-43f2-ad80-91b0910244a1\" (UID: \"80d1eac3-fd1d-43f2-ad80-91b0910244a1\") " Nov 24 01:31:24 crc kubenswrapper[4755]: I1124 01:31:24.495833 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80d1eac3-fd1d-43f2-ad80-91b0910244a1-kube-api-access-npjnb" (OuterVolumeSpecName: "kube-api-access-npjnb") pod "80d1eac3-fd1d-43f2-ad80-91b0910244a1" (UID: "80d1eac3-fd1d-43f2-ad80-91b0910244a1"). InnerVolumeSpecName "kube-api-access-npjnb". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:31:24 crc kubenswrapper[4755]: I1124 01:31:24.541022 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/80d1eac3-fd1d-43f2-ad80-91b0910244a1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "80d1eac3-fd1d-43f2-ad80-91b0910244a1" (UID: "80d1eac3-fd1d-43f2-ad80-91b0910244a1"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:31:24 crc kubenswrapper[4755]: I1124 01:31:24.543524 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/80d1eac3-fd1d-43f2-ad80-91b0910244a1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "80d1eac3-fd1d-43f2-ad80-91b0910244a1" (UID: "80d1eac3-fd1d-43f2-ad80-91b0910244a1"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:31:24 crc kubenswrapper[4755]: I1124 01:31:24.544343 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/80d1eac3-fd1d-43f2-ad80-91b0910244a1-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "80d1eac3-fd1d-43f2-ad80-91b0910244a1" (UID: "80d1eac3-fd1d-43f2-ad80-91b0910244a1"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:31:24 crc kubenswrapper[4755]: I1124 01:31:24.562957 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/80d1eac3-fd1d-43f2-ad80-91b0910244a1-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "80d1eac3-fd1d-43f2-ad80-91b0910244a1" (UID: "80d1eac3-fd1d-43f2-ad80-91b0910244a1"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:31:24 crc kubenswrapper[4755]: I1124 01:31:24.572566 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/80d1eac3-fd1d-43f2-ad80-91b0910244a1-config" (OuterVolumeSpecName: "config") pod "80d1eac3-fd1d-43f2-ad80-91b0910244a1" (UID: "80d1eac3-fd1d-43f2-ad80-91b0910244a1"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:31:24 crc kubenswrapper[4755]: I1124 01:31:24.589076 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/80d1eac3-fd1d-43f2-ad80-91b0910244a1-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:31:24 crc kubenswrapper[4755]: I1124 01:31:24.589124 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/80d1eac3-fd1d-43f2-ad80-91b0910244a1-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 24 01:31:24 crc kubenswrapper[4755]: I1124 01:31:24.589144 4755 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/80d1eac3-fd1d-43f2-ad80-91b0910244a1-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Nov 24 01:31:24 crc kubenswrapper[4755]: I1124 01:31:24.589166 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-npjnb\" (UniqueName: \"kubernetes.io/projected/80d1eac3-fd1d-43f2-ad80-91b0910244a1-kube-api-access-npjnb\") on node \"crc\" DevicePath \"\"" Nov 24 01:31:24 crc kubenswrapper[4755]: I1124 01:31:24.589181 4755 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/80d1eac3-fd1d-43f2-ad80-91b0910244a1-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 24 01:31:24 crc kubenswrapper[4755]: I1124 01:31:24.589195 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/80d1eac3-fd1d-43f2-ad80-91b0910244a1-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 24 01:31:24 crc kubenswrapper[4755]: I1124 01:31:24.890923 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-f9x2z" event={"ID":"80d1eac3-fd1d-43f2-ad80-91b0910244a1","Type":"ContainerDied","Data":"ea62ef234c0b09ee1881ae0e64545021c6ba430102f9b15eaa42f1fb6c813b1a"} Nov 24 01:31:24 crc kubenswrapper[4755]: I1124 01:31:24.890978 4755 scope.go:117] "RemoveContainer" containerID="a098cf926adcc72d49de933224c9783012efd690f6c066af693c16e3e115a5bd" Nov 24 01:31:24 crc kubenswrapper[4755]: I1124 01:31:24.891058 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-f9x2z" Nov 24 01:31:24 crc kubenswrapper[4755]: I1124 01:31:24.917324 4755 scope.go:117] "RemoveContainer" containerID="a859722809f291b6b2b1455711449198c25a357ea3f20c6d871d63fd9f4d4af2" Nov 24 01:31:24 crc kubenswrapper[4755]: I1124 01:31:24.949194 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-f9x2z"] Nov 24 01:31:24 crc kubenswrapper[4755]: I1124 01:31:24.957177 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-f9x2z"] Nov 24 01:31:26 crc kubenswrapper[4755]: I1124 01:31:26.050749 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="80d1eac3-fd1d-43f2-ad80-91b0910244a1" path="/var/lib/kubelet/pods/80d1eac3-fd1d-43f2-ad80-91b0910244a1/volumes" Nov 24 01:31:27 crc kubenswrapper[4755]: I1124 01:31:27.917158 4755 generic.go:334] "Generic (PLEG): container finished" podID="ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f" containerID="45c9c9f2107ede10071edaad7cccd1068d8f7d0c8c7642ec9036461f0bab4da8" exitCode=0 Nov 24 01:31:27 crc kubenswrapper[4755]: I1124 01:31:27.917263 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-b2bn7" event={"ID":"ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f","Type":"ContainerDied","Data":"45c9c9f2107ede10071edaad7cccd1068d8f7d0c8c7642ec9036461f0bab4da8"} Nov 24 01:31:29 crc kubenswrapper[4755]: I1124 01:31:29.271919 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-b2bn7" Nov 24 01:31:29 crc kubenswrapper[4755]: I1124 01:31:29.325263 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f-scripts\") pod \"ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f\" (UID: \"ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f\") " Nov 24 01:31:29 crc kubenswrapper[4755]: I1124 01:31:29.325408 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f-combined-ca-bundle\") pod \"ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f\" (UID: \"ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f\") " Nov 24 01:31:29 crc kubenswrapper[4755]: I1124 01:31:29.325531 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f-config-data\") pod \"ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f\" (UID: \"ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f\") " Nov 24 01:31:29 crc kubenswrapper[4755]: I1124 01:31:29.325571 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-phr8r\" (UniqueName: \"kubernetes.io/projected/ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f-kube-api-access-phr8r\") pod \"ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f\" (UID: \"ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f\") " Nov 24 01:31:29 crc kubenswrapper[4755]: I1124 01:31:29.358003 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f-kube-api-access-phr8r" (OuterVolumeSpecName: "kube-api-access-phr8r") pod "ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f" (UID: "ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f"). InnerVolumeSpecName "kube-api-access-phr8r". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:31:29 crc kubenswrapper[4755]: I1124 01:31:29.362790 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f-scripts" (OuterVolumeSpecName: "scripts") pod "ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f" (UID: "ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:31:29 crc kubenswrapper[4755]: I1124 01:31:29.385801 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f-config-data" (OuterVolumeSpecName: "config-data") pod "ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f" (UID: "ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:31:29 crc kubenswrapper[4755]: I1124 01:31:29.427675 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 01:31:29 crc kubenswrapper[4755]: I1124 01:31:29.427707 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-phr8r\" (UniqueName: \"kubernetes.io/projected/ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f-kube-api-access-phr8r\") on node \"crc\" DevicePath \"\"" Nov 24 01:31:29 crc kubenswrapper[4755]: I1124 01:31:29.427721 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 01:31:29 crc kubenswrapper[4755]: I1124 01:31:29.471379 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f" (UID: "ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:31:29 crc kubenswrapper[4755]: I1124 01:31:29.528384 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:31:29 crc kubenswrapper[4755]: I1124 01:31:29.938416 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-b2bn7" event={"ID":"ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f","Type":"ContainerDied","Data":"6dfcd3ce982b519d6b1e6bbe3be3f1f9d852583c89e93ba49df552c01fd41991"} Nov 24 01:31:29 crc kubenswrapper[4755]: I1124 01:31:29.938473 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6dfcd3ce982b519d6b1e6bbe3be3f1f9d852583c89e93ba49df552c01fd41991" Nov 24 01:31:29 crc kubenswrapper[4755]: I1124 01:31:29.938483 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-b2bn7" Nov 24 01:31:30 crc kubenswrapper[4755]: I1124 01:31:30.122583 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 24 01:31:30 crc kubenswrapper[4755]: I1124 01:31:30.122920 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="e9efebb6-5d59-415e-9c61-0ea2d467d266" containerName="nova-api-log" containerID="cri-o://c924fdc3198eff4e3f2e26fb5e5672422e330473c7229b9971acaa117cf62297" gracePeriod=30 Nov 24 01:31:30 crc kubenswrapper[4755]: I1124 01:31:30.123364 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="e9efebb6-5d59-415e-9c61-0ea2d467d266" containerName="nova-api-api" containerID="cri-o://2dd6a363c60e1873c8a4083da16fd60a364f38db7ec9552c6994c51cc4f732b5" gracePeriod=30 Nov 24 01:31:30 crc kubenswrapper[4755]: I1124 01:31:30.141450 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 24 01:31:30 crc kubenswrapper[4755]: I1124 01:31:30.141717 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="242276fc-0e87-4b68-b540-a0ea131df85a" containerName="nova-scheduler-scheduler" containerID="cri-o://b81c4205d29c51292b87d4b596c9017cc12adde42a0637cffab12ff160c3df74" gracePeriod=30 Nov 24 01:31:30 crc kubenswrapper[4755]: I1124 01:31:30.156711 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 24 01:31:30 crc kubenswrapper[4755]: I1124 01:31:30.156972 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="a8d01eee-3c39-4dee-be99-74c8528f4516" containerName="nova-metadata-log" containerID="cri-o://dc6e0a2be2bede39b861a741aa1ea1e5e07d6ad4731c34a26932c5633959a484" gracePeriod=30 Nov 24 01:31:30 crc kubenswrapper[4755]: I1124 01:31:30.157071 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="a8d01eee-3c39-4dee-be99-74c8528f4516" containerName="nova-metadata-metadata" containerID="cri-o://10c91a0c3d76b137832e77acb32503639ecc457a38b61219ffa9cda63ed9f3b7" gracePeriod=30 Nov 24 01:31:30 crc kubenswrapper[4755]: I1124 01:31:30.829492 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 24 01:31:30 crc kubenswrapper[4755]: I1124 01:31:30.947925 4755 generic.go:334] "Generic (PLEG): container finished" podID="a8d01eee-3c39-4dee-be99-74c8528f4516" containerID="dc6e0a2be2bede39b861a741aa1ea1e5e07d6ad4731c34a26932c5633959a484" exitCode=143 Nov 24 01:31:30 crc kubenswrapper[4755]: I1124 01:31:30.947982 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a8d01eee-3c39-4dee-be99-74c8528f4516","Type":"ContainerDied","Data":"dc6e0a2be2bede39b861a741aa1ea1e5e07d6ad4731c34a26932c5633959a484"} Nov 24 01:31:30 crc kubenswrapper[4755]: I1124 01:31:30.949805 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 24 01:31:30 crc kubenswrapper[4755]: I1124 01:31:30.949897 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e9efebb6-5d59-415e-9c61-0ea2d467d266","Type":"ContainerDied","Data":"2dd6a363c60e1873c8a4083da16fd60a364f38db7ec9552c6994c51cc4f732b5"} Nov 24 01:31:30 crc kubenswrapper[4755]: I1124 01:31:30.949981 4755 scope.go:117] "RemoveContainer" containerID="2dd6a363c60e1873c8a4083da16fd60a364f38db7ec9552c6994c51cc4f732b5" Nov 24 01:31:30 crc kubenswrapper[4755]: I1124 01:31:30.950680 4755 generic.go:334] "Generic (PLEG): container finished" podID="e9efebb6-5d59-415e-9c61-0ea2d467d266" containerID="2dd6a363c60e1873c8a4083da16fd60a364f38db7ec9552c6994c51cc4f732b5" exitCode=0 Nov 24 01:31:30 crc kubenswrapper[4755]: I1124 01:31:30.950736 4755 generic.go:334] "Generic (PLEG): container finished" podID="e9efebb6-5d59-415e-9c61-0ea2d467d266" containerID="c924fdc3198eff4e3f2e26fb5e5672422e330473c7229b9971acaa117cf62297" exitCode=143 Nov 24 01:31:30 crc kubenswrapper[4755]: I1124 01:31:30.950767 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e9efebb6-5d59-415e-9c61-0ea2d467d266","Type":"ContainerDied","Data":"c924fdc3198eff4e3f2e26fb5e5672422e330473c7229b9971acaa117cf62297"} Nov 24 01:31:30 crc kubenswrapper[4755]: I1124 01:31:30.950802 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e9efebb6-5d59-415e-9c61-0ea2d467d266","Type":"ContainerDied","Data":"457c99bdd490da0e4f5e7d1c692123f82071d94b4aa93eaf7950295f477d71e7"} Nov 24 01:31:30 crc kubenswrapper[4755]: I1124 01:31:30.968226 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9efebb6-5d59-415e-9c61-0ea2d467d266-config-data\") pod \"e9efebb6-5d59-415e-9c61-0ea2d467d266\" (UID: \"e9efebb6-5d59-415e-9c61-0ea2d467d266\") " Nov 24 01:31:30 crc kubenswrapper[4755]: I1124 01:31:30.968324 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9efebb6-5d59-415e-9c61-0ea2d467d266-internal-tls-certs\") pod \"e9efebb6-5d59-415e-9c61-0ea2d467d266\" (UID: \"e9efebb6-5d59-415e-9c61-0ea2d467d266\") " Nov 24 01:31:30 crc kubenswrapper[4755]: I1124 01:31:30.968394 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e9efebb6-5d59-415e-9c61-0ea2d467d266-logs\") pod \"e9efebb6-5d59-415e-9c61-0ea2d467d266\" (UID: \"e9efebb6-5d59-415e-9c61-0ea2d467d266\") " Nov 24 01:31:30 crc kubenswrapper[4755]: I1124 01:31:30.968430 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6jccz\" (UniqueName: \"kubernetes.io/projected/e9efebb6-5d59-415e-9c61-0ea2d467d266-kube-api-access-6jccz\") pod \"e9efebb6-5d59-415e-9c61-0ea2d467d266\" (UID: \"e9efebb6-5d59-415e-9c61-0ea2d467d266\") " Nov 24 01:31:30 crc kubenswrapper[4755]: I1124 01:31:30.968449 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9efebb6-5d59-415e-9c61-0ea2d467d266-public-tls-certs\") pod \"e9efebb6-5d59-415e-9c61-0ea2d467d266\" (UID: \"e9efebb6-5d59-415e-9c61-0ea2d467d266\") " Nov 24 01:31:30 crc kubenswrapper[4755]: I1124 01:31:30.968478 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9efebb6-5d59-415e-9c61-0ea2d467d266-combined-ca-bundle\") pod \"e9efebb6-5d59-415e-9c61-0ea2d467d266\" (UID: \"e9efebb6-5d59-415e-9c61-0ea2d467d266\") " Nov 24 01:31:30 crc kubenswrapper[4755]: I1124 01:31:30.968902 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e9efebb6-5d59-415e-9c61-0ea2d467d266-logs" (OuterVolumeSpecName: "logs") pod "e9efebb6-5d59-415e-9c61-0ea2d467d266" (UID: "e9efebb6-5d59-415e-9c61-0ea2d467d266"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:31:30 crc kubenswrapper[4755]: I1124 01:31:30.969251 4755 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e9efebb6-5d59-415e-9c61-0ea2d467d266-logs\") on node \"crc\" DevicePath \"\"" Nov 24 01:31:30 crc kubenswrapper[4755]: I1124 01:31:30.973884 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9efebb6-5d59-415e-9c61-0ea2d467d266-kube-api-access-6jccz" (OuterVolumeSpecName: "kube-api-access-6jccz") pod "e9efebb6-5d59-415e-9c61-0ea2d467d266" (UID: "e9efebb6-5d59-415e-9c61-0ea2d467d266"). InnerVolumeSpecName "kube-api-access-6jccz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:31:30 crc kubenswrapper[4755]: I1124 01:31:30.974844 4755 scope.go:117] "RemoveContainer" containerID="c924fdc3198eff4e3f2e26fb5e5672422e330473c7229b9971acaa117cf62297" Nov 24 01:31:30 crc kubenswrapper[4755]: I1124 01:31:30.995659 4755 scope.go:117] "RemoveContainer" containerID="2dd6a363c60e1873c8a4083da16fd60a364f38db7ec9552c6994c51cc4f732b5" Nov 24 01:31:30 crc kubenswrapper[4755]: E1124 01:31:30.996076 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2dd6a363c60e1873c8a4083da16fd60a364f38db7ec9552c6994c51cc4f732b5\": container with ID starting with 2dd6a363c60e1873c8a4083da16fd60a364f38db7ec9552c6994c51cc4f732b5 not found: ID does not exist" containerID="2dd6a363c60e1873c8a4083da16fd60a364f38db7ec9552c6994c51cc4f732b5" Nov 24 01:31:30 crc kubenswrapper[4755]: I1124 01:31:30.996102 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2dd6a363c60e1873c8a4083da16fd60a364f38db7ec9552c6994c51cc4f732b5"} err="failed to get container status \"2dd6a363c60e1873c8a4083da16fd60a364f38db7ec9552c6994c51cc4f732b5\": rpc error: code = NotFound desc = could not find container \"2dd6a363c60e1873c8a4083da16fd60a364f38db7ec9552c6994c51cc4f732b5\": container with ID starting with 2dd6a363c60e1873c8a4083da16fd60a364f38db7ec9552c6994c51cc4f732b5 not found: ID does not exist" Nov 24 01:31:30 crc kubenswrapper[4755]: I1124 01:31:30.996120 4755 scope.go:117] "RemoveContainer" containerID="c924fdc3198eff4e3f2e26fb5e5672422e330473c7229b9971acaa117cf62297" Nov 24 01:31:30 crc kubenswrapper[4755]: E1124 01:31:30.997791 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c924fdc3198eff4e3f2e26fb5e5672422e330473c7229b9971acaa117cf62297\": container with ID starting with c924fdc3198eff4e3f2e26fb5e5672422e330473c7229b9971acaa117cf62297 not found: ID does not exist" containerID="c924fdc3198eff4e3f2e26fb5e5672422e330473c7229b9971acaa117cf62297" Nov 24 01:31:30 crc kubenswrapper[4755]: I1124 01:31:30.997822 4755 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"c924fdc3198eff4e3f2e26fb5e5672422e330473c7229b9971acaa117cf62297"} err="failed to get container status \"c924fdc3198eff4e3f2e26fb5e5672422e330473c7229b9971acaa117cf62297\": rpc error: code = NotFound desc = could not find container \"c924fdc3198eff4e3f2e26fb5e5672422e330473c7229b9971acaa117cf62297\": container with ID starting with c924fdc3198eff4e3f2e26fb5e5672422e330473c7229b9971acaa117cf62297 not found: ID does not exist" Nov 24 01:31:30 crc kubenswrapper[4755]: I1124 01:31:30.997843 4755 scope.go:117] "RemoveContainer" containerID="2dd6a363c60e1873c8a4083da16fd60a364f38db7ec9552c6994c51cc4f732b5" Nov 24 01:31:30 crc kubenswrapper[4755]: I1124 01:31:30.998149 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2dd6a363c60e1873c8a4083da16fd60a364f38db7ec9552c6994c51cc4f732b5"} err="failed to get container status \"2dd6a363c60e1873c8a4083da16fd60a364f38db7ec9552c6994c51cc4f732b5\": rpc error: code = NotFound desc = could not find container \"2dd6a363c60e1873c8a4083da16fd60a364f38db7ec9552c6994c51cc4f732b5\": container with ID starting with 2dd6a363c60e1873c8a4083da16fd60a364f38db7ec9552c6994c51cc4f732b5 not found: ID does not exist" Nov 24 01:31:30 crc kubenswrapper[4755]: I1124 01:31:30.998172 4755 scope.go:117] "RemoveContainer" containerID="c924fdc3198eff4e3f2e26fb5e5672422e330473c7229b9971acaa117cf62297" Nov 24 01:31:30 crc kubenswrapper[4755]: I1124 01:31:30.998433 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c924fdc3198eff4e3f2e26fb5e5672422e330473c7229b9971acaa117cf62297"} err="failed to get container status \"c924fdc3198eff4e3f2e26fb5e5672422e330473c7229b9971acaa117cf62297\": rpc error: code = NotFound desc = could not find container \"c924fdc3198eff4e3f2e26fb5e5672422e330473c7229b9971acaa117cf62297\": container with ID starting with c924fdc3198eff4e3f2e26fb5e5672422e330473c7229b9971acaa117cf62297 not found: ID does not exist" Nov 24 01:31:31 crc kubenswrapper[4755]: I1124 01:31:31.002218 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9efebb6-5d59-415e-9c61-0ea2d467d266-config-data" (OuterVolumeSpecName: "config-data") pod "e9efebb6-5d59-415e-9c61-0ea2d467d266" (UID: "e9efebb6-5d59-415e-9c61-0ea2d467d266"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:31:31 crc kubenswrapper[4755]: I1124 01:31:31.006230 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9efebb6-5d59-415e-9c61-0ea2d467d266-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e9efebb6-5d59-415e-9c61-0ea2d467d266" (UID: "e9efebb6-5d59-415e-9c61-0ea2d467d266"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:31:31 crc kubenswrapper[4755]: I1124 01:31:31.022356 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9efebb6-5d59-415e-9c61-0ea2d467d266-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "e9efebb6-5d59-415e-9c61-0ea2d467d266" (UID: "e9efebb6-5d59-415e-9c61-0ea2d467d266"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:31:31 crc kubenswrapper[4755]: I1124 01:31:31.023221 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9efebb6-5d59-415e-9c61-0ea2d467d266-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "e9efebb6-5d59-415e-9c61-0ea2d467d266" (UID: "e9efebb6-5d59-415e-9c61-0ea2d467d266"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:31:31 crc kubenswrapper[4755]: I1124 01:31:31.071171 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9efebb6-5d59-415e-9c61-0ea2d467d266-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 01:31:31 crc kubenswrapper[4755]: I1124 01:31:31.071211 4755 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9efebb6-5d59-415e-9c61-0ea2d467d266-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 24 01:31:31 crc kubenswrapper[4755]: I1124 01:31:31.071223 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6jccz\" (UniqueName: \"kubernetes.io/projected/e9efebb6-5d59-415e-9c61-0ea2d467d266-kube-api-access-6jccz\") on node \"crc\" DevicePath \"\"" Nov 24 01:31:31 crc kubenswrapper[4755]: I1124 01:31:31.071236 4755 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9efebb6-5d59-415e-9c61-0ea2d467d266-public-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 24 01:31:31 crc kubenswrapper[4755]: I1124 01:31:31.071245 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9efebb6-5d59-415e-9c61-0ea2d467d266-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:31:31 crc kubenswrapper[4755]: I1124 01:31:31.290397 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 24 01:31:31 crc kubenswrapper[4755]: I1124 01:31:31.298145 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Nov 24 01:31:31 crc kubenswrapper[4755]: I1124 01:31:31.304330 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Nov 24 01:31:31 crc kubenswrapper[4755]: E1124 01:31:31.304703 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9efebb6-5d59-415e-9c61-0ea2d467d266" containerName="nova-api-log" Nov 24 01:31:31 crc kubenswrapper[4755]: I1124 01:31:31.304719 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9efebb6-5d59-415e-9c61-0ea2d467d266" containerName="nova-api-log" Nov 24 01:31:31 crc kubenswrapper[4755]: E1124 01:31:31.304733 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f" containerName="nova-manage" Nov 24 01:31:31 crc kubenswrapper[4755]: I1124 01:31:31.304739 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f" containerName="nova-manage" Nov 24 01:31:31 crc kubenswrapper[4755]: E1124 01:31:31.304751 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9efebb6-5d59-415e-9c61-0ea2d467d266" containerName="nova-api-api" Nov 24 01:31:31 crc kubenswrapper[4755]: I1124 01:31:31.304757 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9efebb6-5d59-415e-9c61-0ea2d467d266" containerName="nova-api-api" Nov 24 01:31:31 crc kubenswrapper[4755]: E1124 01:31:31.304771 4755 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="80d1eac3-fd1d-43f2-ad80-91b0910244a1" containerName="dnsmasq-dns" Nov 24 01:31:31 crc kubenswrapper[4755]: I1124 01:31:31.304776 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="80d1eac3-fd1d-43f2-ad80-91b0910244a1" containerName="dnsmasq-dns" Nov 24 01:31:31 crc kubenswrapper[4755]: E1124 01:31:31.304786 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80d1eac3-fd1d-43f2-ad80-91b0910244a1" containerName="init" Nov 24 01:31:31 crc kubenswrapper[4755]: I1124 01:31:31.304792 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="80d1eac3-fd1d-43f2-ad80-91b0910244a1" containerName="init" Nov 24 01:31:31 crc kubenswrapper[4755]: I1124 01:31:31.304960 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f" containerName="nova-manage" Nov 24 01:31:31 crc kubenswrapper[4755]: I1124 01:31:31.304974 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="80d1eac3-fd1d-43f2-ad80-91b0910244a1" containerName="dnsmasq-dns" Nov 24 01:31:31 crc kubenswrapper[4755]: I1124 01:31:31.304987 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9efebb6-5d59-415e-9c61-0ea2d467d266" containerName="nova-api-api" Nov 24 01:31:31 crc kubenswrapper[4755]: I1124 01:31:31.305002 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9efebb6-5d59-415e-9c61-0ea2d467d266" containerName="nova-api-log" Nov 24 01:31:31 crc kubenswrapper[4755]: I1124 01:31:31.305868 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 24 01:31:31 crc kubenswrapper[4755]: I1124 01:31:31.308153 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Nov 24 01:31:31 crc kubenswrapper[4755]: I1124 01:31:31.308355 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Nov 24 01:31:31 crc kubenswrapper[4755]: I1124 01:31:31.308513 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Nov 24 01:31:31 crc kubenswrapper[4755]: I1124 01:31:31.317892 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 24 01:31:31 crc kubenswrapper[4755]: I1124 01:31:31.376337 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mgz25\" (UniqueName: \"kubernetes.io/projected/d237edf7-c0f4-4cfd-a1d2-fd6ac3f585b7-kube-api-access-mgz25\") pod \"nova-api-0\" (UID: \"d237edf7-c0f4-4cfd-a1d2-fd6ac3f585b7\") " pod="openstack/nova-api-0" Nov 24 01:31:31 crc kubenswrapper[4755]: I1124 01:31:31.376393 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d237edf7-c0f4-4cfd-a1d2-fd6ac3f585b7-config-data\") pod \"nova-api-0\" (UID: \"d237edf7-c0f4-4cfd-a1d2-fd6ac3f585b7\") " pod="openstack/nova-api-0" Nov 24 01:31:31 crc kubenswrapper[4755]: I1124 01:31:31.376422 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d237edf7-c0f4-4cfd-a1d2-fd6ac3f585b7-logs\") pod \"nova-api-0\" (UID: \"d237edf7-c0f4-4cfd-a1d2-fd6ac3f585b7\") " pod="openstack/nova-api-0" Nov 24 01:31:31 crc kubenswrapper[4755]: I1124 01:31:31.376450 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d237edf7-c0f4-4cfd-a1d2-fd6ac3f585b7-internal-tls-certs\") pod \"nova-api-0\" (UID: \"d237edf7-c0f4-4cfd-a1d2-fd6ac3f585b7\") " pod="openstack/nova-api-0" Nov 24 01:31:31 crc kubenswrapper[4755]: I1124 01:31:31.376670 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d237edf7-c0f4-4cfd-a1d2-fd6ac3f585b7-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d237edf7-c0f4-4cfd-a1d2-fd6ac3f585b7\") " pod="openstack/nova-api-0" Nov 24 01:31:31 crc kubenswrapper[4755]: I1124 01:31:31.376789 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d237edf7-c0f4-4cfd-a1d2-fd6ac3f585b7-public-tls-certs\") pod \"nova-api-0\" (UID: \"d237edf7-c0f4-4cfd-a1d2-fd6ac3f585b7\") " pod="openstack/nova-api-0" Nov 24 01:31:31 crc kubenswrapper[4755]: I1124 01:31:31.479135 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mgz25\" (UniqueName: \"kubernetes.io/projected/d237edf7-c0f4-4cfd-a1d2-fd6ac3f585b7-kube-api-access-mgz25\") pod \"nova-api-0\" (UID: \"d237edf7-c0f4-4cfd-a1d2-fd6ac3f585b7\") " pod="openstack/nova-api-0" Nov 24 01:31:31 crc kubenswrapper[4755]: I1124 01:31:31.479197 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d237edf7-c0f4-4cfd-a1d2-fd6ac3f585b7-config-data\") pod \"nova-api-0\" (UID: \"d237edf7-c0f4-4cfd-a1d2-fd6ac3f585b7\") " pod="openstack/nova-api-0" Nov 24 01:31:31 crc kubenswrapper[4755]: I1124 01:31:31.479227 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d237edf7-c0f4-4cfd-a1d2-fd6ac3f585b7-logs\") pod \"nova-api-0\" (UID: \"d237edf7-c0f4-4cfd-a1d2-fd6ac3f585b7\") " pod="openstack/nova-api-0" Nov 24 01:31:31 crc kubenswrapper[4755]: I1124 01:31:31.479254 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d237edf7-c0f4-4cfd-a1d2-fd6ac3f585b7-internal-tls-certs\") pod \"nova-api-0\" (UID: \"d237edf7-c0f4-4cfd-a1d2-fd6ac3f585b7\") " pod="openstack/nova-api-0" Nov 24 01:31:31 crc kubenswrapper[4755]: I1124 01:31:31.479293 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d237edf7-c0f4-4cfd-a1d2-fd6ac3f585b7-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d237edf7-c0f4-4cfd-a1d2-fd6ac3f585b7\") " pod="openstack/nova-api-0" Nov 24 01:31:31 crc kubenswrapper[4755]: I1124 01:31:31.479351 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d237edf7-c0f4-4cfd-a1d2-fd6ac3f585b7-public-tls-certs\") pod \"nova-api-0\" (UID: \"d237edf7-c0f4-4cfd-a1d2-fd6ac3f585b7\") " pod="openstack/nova-api-0" Nov 24 01:31:31 crc kubenswrapper[4755]: I1124 01:31:31.479829 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d237edf7-c0f4-4cfd-a1d2-fd6ac3f585b7-logs\") pod \"nova-api-0\" (UID: \"d237edf7-c0f4-4cfd-a1d2-fd6ac3f585b7\") " pod="openstack/nova-api-0" Nov 24 01:31:31 crc kubenswrapper[4755]: I1124 01:31:31.483030 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d237edf7-c0f4-4cfd-a1d2-fd6ac3f585b7-internal-tls-certs\") pod \"nova-api-0\" (UID: \"d237edf7-c0f4-4cfd-a1d2-fd6ac3f585b7\") " pod="openstack/nova-api-0" Nov 24 01:31:31 crc kubenswrapper[4755]: I1124 01:31:31.484039 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d237edf7-c0f4-4cfd-a1d2-fd6ac3f585b7-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d237edf7-c0f4-4cfd-a1d2-fd6ac3f585b7\") " pod="openstack/nova-api-0" Nov 24 01:31:31 crc kubenswrapper[4755]: I1124 01:31:31.489254 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d237edf7-c0f4-4cfd-a1d2-fd6ac3f585b7-public-tls-certs\") pod \"nova-api-0\" (UID: \"d237edf7-c0f4-4cfd-a1d2-fd6ac3f585b7\") " pod="openstack/nova-api-0" Nov 24 01:31:31 crc kubenswrapper[4755]: I1124 01:31:31.490403 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d237edf7-c0f4-4cfd-a1d2-fd6ac3f585b7-config-data\") pod \"nova-api-0\" (UID: \"d237edf7-c0f4-4cfd-a1d2-fd6ac3f585b7\") " pod="openstack/nova-api-0" Nov 24 01:31:31 crc kubenswrapper[4755]: I1124 01:31:31.496985 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mgz25\" (UniqueName: \"kubernetes.io/projected/d237edf7-c0f4-4cfd-a1d2-fd6ac3f585b7-kube-api-access-mgz25\") pod \"nova-api-0\" (UID: \"d237edf7-c0f4-4cfd-a1d2-fd6ac3f585b7\") " pod="openstack/nova-api-0" Nov 24 01:31:31 crc kubenswrapper[4755]: I1124 01:31:31.626444 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 24 01:31:31 crc kubenswrapper[4755]: E1124 01:31:31.936946 4755 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b81c4205d29c51292b87d4b596c9017cc12adde42a0637cffab12ff160c3df74" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 24 01:31:31 crc kubenswrapper[4755]: E1124 01:31:31.939082 4755 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b81c4205d29c51292b87d4b596c9017cc12adde42a0637cffab12ff160c3df74" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 24 01:31:31 crc kubenswrapper[4755]: E1124 01:31:31.940971 4755 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b81c4205d29c51292b87d4b596c9017cc12adde42a0637cffab12ff160c3df74" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 24 01:31:31 crc kubenswrapper[4755]: E1124 01:31:31.941042 4755 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="242276fc-0e87-4b68-b540-a0ea131df85a" containerName="nova-scheduler-scheduler" Nov 24 01:31:32 crc kubenswrapper[4755]: I1124 01:31:32.009885 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9efebb6-5d59-415e-9c61-0ea2d467d266" 
path="/var/lib/kubelet/pods/e9efebb6-5d59-415e-9c61-0ea2d467d266/volumes" Nov 24 01:31:32 crc kubenswrapper[4755]: W1124 01:31:32.090707 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd237edf7_c0f4_4cfd_a1d2_fd6ac3f585b7.slice/crio-d307f08d149362e3a61993530762ccd6334e04fb51f0c707d06b4819a3ef5161 WatchSource:0}: Error finding container d307f08d149362e3a61993530762ccd6334e04fb51f0c707d06b4819a3ef5161: Status 404 returned error can't find the container with id d307f08d149362e3a61993530762ccd6334e04fb51f0c707d06b4819a3ef5161 Nov 24 01:31:32 crc kubenswrapper[4755]: I1124 01:31:32.093204 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 24 01:31:32 crc kubenswrapper[4755]: I1124 01:31:32.978884 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d237edf7-c0f4-4cfd-a1d2-fd6ac3f585b7","Type":"ContainerStarted","Data":"cbaa29ada2f2b45acc7881a24e9fc1eedf91e3991c46c64816bbbcc372ab3f53"} Nov 24 01:31:32 crc kubenswrapper[4755]: I1124 01:31:32.979165 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d237edf7-c0f4-4cfd-a1d2-fd6ac3f585b7","Type":"ContainerStarted","Data":"46fd148b503e30eef4ea690e166f6d33787d1a73f1625e3450c0f560c8f0aba8"} Nov 24 01:31:32 crc kubenswrapper[4755]: I1124 01:31:32.979179 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d237edf7-c0f4-4cfd-a1d2-fd6ac3f585b7","Type":"ContainerStarted","Data":"d307f08d149362e3a61993530762ccd6334e04fb51f0c707d06b4819a3ef5161"} Nov 24 01:31:33 crc kubenswrapper[4755]: I1124 01:31:33.007496 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.007470834 podStartE2EDuration="2.007470834s" podCreationTimestamp="2025-11-24 01:31:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:31:33.000119807 +0000 UTC m=+1117.686185358" watchObservedRunningTime="2025-11-24 01:31:33.007470834 +0000 UTC m=+1117.693536355" Nov 24 01:31:33 crc kubenswrapper[4755]: I1124 01:31:33.234193 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="a8d01eee-3c39-4dee-be99-74c8528f4516" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.194:8775/\": read tcp 10.217.0.2:44548->10.217.0.194:8775: read: connection reset by peer" Nov 24 01:31:33 crc kubenswrapper[4755]: I1124 01:31:33.234807 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="a8d01eee-3c39-4dee-be99-74c8528f4516" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.194:8775/\": read tcp 10.217.0.2:44532->10.217.0.194:8775: read: connection reset by peer" Nov 24 01:31:33 crc kubenswrapper[4755]: I1124 01:31:33.295224 4755 patch_prober.go:28] interesting pod/machine-config-daemon-h8xzm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 01:31:33 crc kubenswrapper[4755]: I1124 01:31:33.295291 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 01:31:33 crc kubenswrapper[4755]: I1124 01:31:33.647298 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 24 01:31:33 crc kubenswrapper[4755]: I1124 01:31:33.722415 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zrwth\" (UniqueName: \"kubernetes.io/projected/a8d01eee-3c39-4dee-be99-74c8528f4516-kube-api-access-zrwth\") pod \"a8d01eee-3c39-4dee-be99-74c8528f4516\" (UID: \"a8d01eee-3c39-4dee-be99-74c8528f4516\") " Nov 24 01:31:33 crc kubenswrapper[4755]: I1124 01:31:33.722587 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8d01eee-3c39-4dee-be99-74c8528f4516-config-data\") pod \"a8d01eee-3c39-4dee-be99-74c8528f4516\" (UID: \"a8d01eee-3c39-4dee-be99-74c8528f4516\") " Nov 24 01:31:33 crc kubenswrapper[4755]: I1124 01:31:33.722693 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8d01eee-3c39-4dee-be99-74c8528f4516-combined-ca-bundle\") pod \"a8d01eee-3c39-4dee-be99-74c8528f4516\" (UID: \"a8d01eee-3c39-4dee-be99-74c8528f4516\") " Nov 24 01:31:33 crc kubenswrapper[4755]: I1124 01:31:33.722791 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a8d01eee-3c39-4dee-be99-74c8528f4516-logs\") pod \"a8d01eee-3c39-4dee-be99-74c8528f4516\" (UID: \"a8d01eee-3c39-4dee-be99-74c8528f4516\") " Nov 24 01:31:33 crc kubenswrapper[4755]: I1124 01:31:33.722816 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8d01eee-3c39-4dee-be99-74c8528f4516-nova-metadata-tls-certs\") pod \"a8d01eee-3c39-4dee-be99-74c8528f4516\" (UID: \"a8d01eee-3c39-4dee-be99-74c8528f4516\") " Nov 24 01:31:33 crc kubenswrapper[4755]: I1124 01:31:33.723723 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8d01eee-3c39-4dee-be99-74c8528f4516-logs" (OuterVolumeSpecName: "logs") pod "a8d01eee-3c39-4dee-be99-74c8528f4516" (UID: "a8d01eee-3c39-4dee-be99-74c8528f4516"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:31:33 crc kubenswrapper[4755]: I1124 01:31:33.733101 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8d01eee-3c39-4dee-be99-74c8528f4516-kube-api-access-zrwth" (OuterVolumeSpecName: "kube-api-access-zrwth") pod "a8d01eee-3c39-4dee-be99-74c8528f4516" (UID: "a8d01eee-3c39-4dee-be99-74c8528f4516"). InnerVolumeSpecName "kube-api-access-zrwth". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:31:33 crc kubenswrapper[4755]: I1124 01:31:33.824931 4755 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a8d01eee-3c39-4dee-be99-74c8528f4516-logs\") on node \"crc\" DevicePath \"\"" Nov 24 01:31:33 crc kubenswrapper[4755]: I1124 01:31:33.824963 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zrwth\" (UniqueName: \"kubernetes.io/projected/a8d01eee-3c39-4dee-be99-74c8528f4516-kube-api-access-zrwth\") on node \"crc\" DevicePath \"\"" Nov 24 01:31:33 crc kubenswrapper[4755]: I1124 01:31:33.831750 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8d01eee-3c39-4dee-be99-74c8528f4516-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a8d01eee-3c39-4dee-be99-74c8528f4516" (UID: "a8d01eee-3c39-4dee-be99-74c8528f4516"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:31:33 crc kubenswrapper[4755]: I1124 01:31:33.863989 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8d01eee-3c39-4dee-be99-74c8528f4516-config-data" (OuterVolumeSpecName: "config-data") pod "a8d01eee-3c39-4dee-be99-74c8528f4516" (UID: "a8d01eee-3c39-4dee-be99-74c8528f4516"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:31:33 crc kubenswrapper[4755]: I1124 01:31:33.874838 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8d01eee-3c39-4dee-be99-74c8528f4516-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "a8d01eee-3c39-4dee-be99-74c8528f4516" (UID: "a8d01eee-3c39-4dee-be99-74c8528f4516"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:31:33 crc kubenswrapper[4755]: I1124 01:31:33.926164 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8d01eee-3c39-4dee-be99-74c8528f4516-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 01:31:33 crc kubenswrapper[4755]: I1124 01:31:33.926198 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8d01eee-3c39-4dee-be99-74c8528f4516-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:31:33 crc kubenswrapper[4755]: I1124 01:31:33.926212 4755 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8d01eee-3c39-4dee-be99-74c8528f4516-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 24 01:31:34 crc kubenswrapper[4755]: I1124 01:31:34.002086 4755 generic.go:334] "Generic (PLEG): container finished" podID="a8d01eee-3c39-4dee-be99-74c8528f4516" containerID="10c91a0c3d76b137832e77acb32503639ecc457a38b61219ffa9cda63ed9f3b7" exitCode=0 Nov 24 01:31:34 crc kubenswrapper[4755]: I1124 01:31:34.002179 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 24 01:31:34 crc kubenswrapper[4755]: I1124 01:31:34.010011 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a8d01eee-3c39-4dee-be99-74c8528f4516","Type":"ContainerDied","Data":"10c91a0c3d76b137832e77acb32503639ecc457a38b61219ffa9cda63ed9f3b7"} Nov 24 01:31:34 crc kubenswrapper[4755]: I1124 01:31:34.010057 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a8d01eee-3c39-4dee-be99-74c8528f4516","Type":"ContainerDied","Data":"718d4160897d72e5744179098872db63a9bd00736dd64352439e840f7829a20b"} Nov 24 01:31:34 crc kubenswrapper[4755]: I1124 01:31:34.010094 4755 scope.go:117] "RemoveContainer" containerID="10c91a0c3d76b137832e77acb32503639ecc457a38b61219ffa9cda63ed9f3b7" Nov 24 01:31:34 crc kubenswrapper[4755]: I1124 01:31:34.038680 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 24 01:31:34 crc kubenswrapper[4755]: I1124 01:31:34.047293 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Nov 24 01:31:34 crc kubenswrapper[4755]: I1124 01:31:34.047492 4755 scope.go:117] "RemoveContainer" containerID="dc6e0a2be2bede39b861a741aa1ea1e5e07d6ad4731c34a26932c5633959a484" Nov 24 01:31:34 crc kubenswrapper[4755]: I1124 01:31:34.061346 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Nov 24 01:31:34 crc kubenswrapper[4755]: E1124 01:31:34.061794 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8d01eee-3c39-4dee-be99-74c8528f4516" containerName="nova-metadata-metadata" Nov 24 01:31:34 crc kubenswrapper[4755]: I1124 01:31:34.061811 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8d01eee-3c39-4dee-be99-74c8528f4516" containerName="nova-metadata-metadata" Nov 24 01:31:34 crc kubenswrapper[4755]: E1124 01:31:34.061833 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8d01eee-3c39-4dee-be99-74c8528f4516" containerName="nova-metadata-log" Nov 24 01:31:34 crc kubenswrapper[4755]: I1124 01:31:34.061839 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8d01eee-3c39-4dee-be99-74c8528f4516" containerName="nova-metadata-log" Nov 24 01:31:34 crc kubenswrapper[4755]: I1124 01:31:34.062015 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8d01eee-3c39-4dee-be99-74c8528f4516" containerName="nova-metadata-log" Nov 24 01:31:34 crc kubenswrapper[4755]: I1124 01:31:34.062033 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8d01eee-3c39-4dee-be99-74c8528f4516" containerName="nova-metadata-metadata" Nov 24 01:31:34 crc kubenswrapper[4755]: I1124 01:31:34.063168 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 24 01:31:34 crc kubenswrapper[4755]: I1124 01:31:34.065029 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Nov 24 01:31:34 crc kubenswrapper[4755]: I1124 01:31:34.065477 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Nov 24 01:31:34 crc kubenswrapper[4755]: I1124 01:31:34.080371 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 24 01:31:34 crc kubenswrapper[4755]: I1124 01:31:34.084474 4755 scope.go:117] "RemoveContainer" containerID="10c91a0c3d76b137832e77acb32503639ecc457a38b61219ffa9cda63ed9f3b7" Nov 24 01:31:34 crc kubenswrapper[4755]: E1124 01:31:34.085060 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"10c91a0c3d76b137832e77acb32503639ecc457a38b61219ffa9cda63ed9f3b7\": container with ID starting with 10c91a0c3d76b137832e77acb32503639ecc457a38b61219ffa9cda63ed9f3b7 not found: ID does not exist" containerID="10c91a0c3d76b137832e77acb32503639ecc457a38b61219ffa9cda63ed9f3b7" Nov 24 01:31:34 crc kubenswrapper[4755]: I1124 01:31:34.085112 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10c91a0c3d76b137832e77acb32503639ecc457a38b61219ffa9cda63ed9f3b7"} err="failed to get container status \"10c91a0c3d76b137832e77acb32503639ecc457a38b61219ffa9cda63ed9f3b7\": rpc error: code = NotFound desc = could not find container \"10c91a0c3d76b137832e77acb32503639ecc457a38b61219ffa9cda63ed9f3b7\": container with ID starting with 10c91a0c3d76b137832e77acb32503639ecc457a38b61219ffa9cda63ed9f3b7 not found: ID does not exist" Nov 24 01:31:34 crc kubenswrapper[4755]: I1124 01:31:34.085145 4755 scope.go:117] "RemoveContainer" containerID="dc6e0a2be2bede39b861a741aa1ea1e5e07d6ad4731c34a26932c5633959a484" Nov 24 01:31:34 crc kubenswrapper[4755]: E1124 01:31:34.085463 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dc6e0a2be2bede39b861a741aa1ea1e5e07d6ad4731c34a26932c5633959a484\": container with ID starting with dc6e0a2be2bede39b861a741aa1ea1e5e07d6ad4731c34a26932c5633959a484 not found: ID does not exist" containerID="dc6e0a2be2bede39b861a741aa1ea1e5e07d6ad4731c34a26932c5633959a484" Nov 24 01:31:34 crc kubenswrapper[4755]: I1124 01:31:34.085494 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc6e0a2be2bede39b861a741aa1ea1e5e07d6ad4731c34a26932c5633959a484"} err="failed to get container status \"dc6e0a2be2bede39b861a741aa1ea1e5e07d6ad4731c34a26932c5633959a484\": rpc error: code = NotFound desc = could not find container \"dc6e0a2be2bede39b861a741aa1ea1e5e07d6ad4731c34a26932c5633959a484\": container with ID starting with dc6e0a2be2bede39b861a741aa1ea1e5e07d6ad4731c34a26932c5633959a484 not found: ID does not exist" Nov 24 01:31:34 crc kubenswrapper[4755]: I1124 01:31:34.131539 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6db35847-3127-4ec0-b617-18e9c0f03f8a-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"6db35847-3127-4ec0-b617-18e9c0f03f8a\") " pod="openstack/nova-metadata-0" Nov 24 01:31:34 crc kubenswrapper[4755]: I1124 01:31:34.131595 4755 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6db35847-3127-4ec0-b617-18e9c0f03f8a-logs\") pod \"nova-metadata-0\" (UID: \"6db35847-3127-4ec0-b617-18e9c0f03f8a\") " pod="openstack/nova-metadata-0" Nov 24 01:31:34 crc kubenswrapper[4755]: I1124 01:31:34.131641 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6db35847-3127-4ec0-b617-18e9c0f03f8a-config-data\") pod \"nova-metadata-0\" (UID: \"6db35847-3127-4ec0-b617-18e9c0f03f8a\") " pod="openstack/nova-metadata-0" Nov 24 01:31:34 crc kubenswrapper[4755]: I1124 01:31:34.131739 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6db35847-3127-4ec0-b617-18e9c0f03f8a-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6db35847-3127-4ec0-b617-18e9c0f03f8a\") " pod="openstack/nova-metadata-0" Nov 24 01:31:34 crc kubenswrapper[4755]: I1124 01:31:34.131772 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9vnj\" (UniqueName: \"kubernetes.io/projected/6db35847-3127-4ec0-b617-18e9c0f03f8a-kube-api-access-x9vnj\") pod \"nova-metadata-0\" (UID: \"6db35847-3127-4ec0-b617-18e9c0f03f8a\") " pod="openstack/nova-metadata-0" Nov 24 01:31:34 crc kubenswrapper[4755]: I1124 01:31:34.233394 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6db35847-3127-4ec0-b617-18e9c0f03f8a-config-data\") pod \"nova-metadata-0\" (UID: \"6db35847-3127-4ec0-b617-18e9c0f03f8a\") " pod="openstack/nova-metadata-0" Nov 24 01:31:34 crc kubenswrapper[4755]: I1124 01:31:34.233484 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6db35847-3127-4ec0-b617-18e9c0f03f8a-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6db35847-3127-4ec0-b617-18e9c0f03f8a\") " pod="openstack/nova-metadata-0" Nov 24 01:31:34 crc kubenswrapper[4755]: I1124 01:31:34.233530 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9vnj\" (UniqueName: \"kubernetes.io/projected/6db35847-3127-4ec0-b617-18e9c0f03f8a-kube-api-access-x9vnj\") pod \"nova-metadata-0\" (UID: \"6db35847-3127-4ec0-b617-18e9c0f03f8a\") " pod="openstack/nova-metadata-0" Nov 24 01:31:34 crc kubenswrapper[4755]: I1124 01:31:34.233658 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6db35847-3127-4ec0-b617-18e9c0f03f8a-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"6db35847-3127-4ec0-b617-18e9c0f03f8a\") " pod="openstack/nova-metadata-0" Nov 24 01:31:34 crc kubenswrapper[4755]: I1124 01:31:34.233694 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6db35847-3127-4ec0-b617-18e9c0f03f8a-logs\") pod \"nova-metadata-0\" (UID: \"6db35847-3127-4ec0-b617-18e9c0f03f8a\") " pod="openstack/nova-metadata-0" Nov 24 01:31:34 crc kubenswrapper[4755]: I1124 01:31:34.234091 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6db35847-3127-4ec0-b617-18e9c0f03f8a-logs\") pod \"nova-metadata-0\" (UID: \"6db35847-3127-4ec0-b617-18e9c0f03f8a\") " 
pod="openstack/nova-metadata-0" Nov 24 01:31:34 crc kubenswrapper[4755]: I1124 01:31:34.238781 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6db35847-3127-4ec0-b617-18e9c0f03f8a-config-data\") pod \"nova-metadata-0\" (UID: \"6db35847-3127-4ec0-b617-18e9c0f03f8a\") " pod="openstack/nova-metadata-0" Nov 24 01:31:34 crc kubenswrapper[4755]: I1124 01:31:34.239273 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6db35847-3127-4ec0-b617-18e9c0f03f8a-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"6db35847-3127-4ec0-b617-18e9c0f03f8a\") " pod="openstack/nova-metadata-0" Nov 24 01:31:34 crc kubenswrapper[4755]: I1124 01:31:34.241548 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6db35847-3127-4ec0-b617-18e9c0f03f8a-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6db35847-3127-4ec0-b617-18e9c0f03f8a\") " pod="openstack/nova-metadata-0" Nov 24 01:31:34 crc kubenswrapper[4755]: I1124 01:31:34.255060 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9vnj\" (UniqueName: \"kubernetes.io/projected/6db35847-3127-4ec0-b617-18e9c0f03f8a-kube-api-access-x9vnj\") pod \"nova-metadata-0\" (UID: \"6db35847-3127-4ec0-b617-18e9c0f03f8a\") " pod="openstack/nova-metadata-0" Nov 24 01:31:34 crc kubenswrapper[4755]: I1124 01:31:34.384841 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 24 01:31:34 crc kubenswrapper[4755]: I1124 01:31:34.833696 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 24 01:31:35 crc kubenswrapper[4755]: I1124 01:31:35.011160 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6db35847-3127-4ec0-b617-18e9c0f03f8a","Type":"ContainerStarted","Data":"c35f55e65351156b7d7645d7e0e4fd6726fe69c1a7af491ecade0f7b42072c87"} Nov 24 01:31:35 crc kubenswrapper[4755]: I1124 01:31:35.971732 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Nov 24 01:31:36 crc kubenswrapper[4755]: I1124 01:31:36.019290 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a8d01eee-3c39-4dee-be99-74c8528f4516" path="/var/lib/kubelet/pods/a8d01eee-3c39-4dee-be99-74c8528f4516/volumes" Nov 24 01:31:36 crc kubenswrapper[4755]: I1124 01:31:36.029735 4755 generic.go:334] "Generic (PLEG): container finished" podID="242276fc-0e87-4b68-b540-a0ea131df85a" containerID="b81c4205d29c51292b87d4b596c9017cc12adde42a0637cffab12ff160c3df74" exitCode=0 Nov 24 01:31:36 crc kubenswrapper[4755]: I1124 01:31:36.029819 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"242276fc-0e87-4b68-b540-a0ea131df85a","Type":"ContainerDied","Data":"b81c4205d29c51292b87d4b596c9017cc12adde42a0637cffab12ff160c3df74"} Nov 24 01:31:36 crc kubenswrapper[4755]: I1124 01:31:36.029845 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"242276fc-0e87-4b68-b540-a0ea131df85a","Type":"ContainerDied","Data":"a00719762059e9dcee09e20677122b9156e36f19529fda5438c14f74622be166"} Nov 24 01:31:36 crc kubenswrapper[4755]: I1124 01:31:36.029868 4755 scope.go:117] "RemoveContainer" containerID="b81c4205d29c51292b87d4b596c9017cc12adde42a0637cffab12ff160c3df74" Nov 24 01:31:36 crc kubenswrapper[4755]: I1124 01:31:36.029972 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 24 01:31:36 crc kubenswrapper[4755]: I1124 01:31:36.036016 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6db35847-3127-4ec0-b617-18e9c0f03f8a","Type":"ContainerStarted","Data":"1f0d027252a9fe84b40db248faac2e7d999179bf7c95b276dcafd328460eb503"} Nov 24 01:31:36 crc kubenswrapper[4755]: I1124 01:31:36.036069 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6db35847-3127-4ec0-b617-18e9c0f03f8a","Type":"ContainerStarted","Data":"8f38f0467b3e77d63c54ad701a2a7050012db42230b429bb4760633515fd147f"} Nov 24 01:31:36 crc kubenswrapper[4755]: I1124 01:31:36.053458 4755 scope.go:117] "RemoveContainer" containerID="b81c4205d29c51292b87d4b596c9017cc12adde42a0637cffab12ff160c3df74" Nov 24 01:31:36 crc kubenswrapper[4755]: E1124 01:31:36.055402 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b81c4205d29c51292b87d4b596c9017cc12adde42a0637cffab12ff160c3df74\": container with ID starting with b81c4205d29c51292b87d4b596c9017cc12adde42a0637cffab12ff160c3df74 not found: ID does not exist" containerID="b81c4205d29c51292b87d4b596c9017cc12adde42a0637cffab12ff160c3df74" Nov 24 01:31:36 crc kubenswrapper[4755]: I1124 01:31:36.055506 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b81c4205d29c51292b87d4b596c9017cc12adde42a0637cffab12ff160c3df74"} err="failed to get container status \"b81c4205d29c51292b87d4b596c9017cc12adde42a0637cffab12ff160c3df74\": rpc error: code = NotFound desc = could not find container \"b81c4205d29c51292b87d4b596c9017cc12adde42a0637cffab12ff160c3df74\": container with ID starting with b81c4205d29c51292b87d4b596c9017cc12adde42a0637cffab12ff160c3df74 not found: ID does not exist" Nov 24 01:31:36 crc kubenswrapper[4755]: I1124 01:31:36.068298 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/242276fc-0e87-4b68-b540-a0ea131df85a-combined-ca-bundle\") pod \"242276fc-0e87-4b68-b540-a0ea131df85a\" (UID: \"242276fc-0e87-4b68-b540-a0ea131df85a\") " Nov 24 01:31:36 crc kubenswrapper[4755]: I1124 01:31:36.068517 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4lqcl\" (UniqueName: \"kubernetes.io/projected/242276fc-0e87-4b68-b540-a0ea131df85a-kube-api-access-4lqcl\") pod \"242276fc-0e87-4b68-b540-a0ea131df85a\" (UID: \"242276fc-0e87-4b68-b540-a0ea131df85a\") " Nov 24 01:31:36 crc kubenswrapper[4755]: I1124 01:31:36.068578 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/242276fc-0e87-4b68-b540-a0ea131df85a-config-data\") pod \"242276fc-0e87-4b68-b540-a0ea131df85a\" (UID: \"242276fc-0e87-4b68-b540-a0ea131df85a\") " Nov 24 01:31:36 crc kubenswrapper[4755]: I1124 01:31:36.071550 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.071530177 podStartE2EDuration="2.071530177s" podCreationTimestamp="2025-11-24 01:31:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:31:36.060588909 +0000 UTC m=+1120.746654410" watchObservedRunningTime="2025-11-24 01:31:36.071530177 +0000 UTC m=+1120.757595678" Nov 24 01:31:36 crc kubenswrapper[4755]: I1124 01:31:36.074589 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/242276fc-0e87-4b68-b540-a0ea131df85a-kube-api-access-4lqcl" (OuterVolumeSpecName: "kube-api-access-4lqcl") pod "242276fc-0e87-4b68-b540-a0ea131df85a" (UID: "242276fc-0e87-4b68-b540-a0ea131df85a"). InnerVolumeSpecName "kube-api-access-4lqcl". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:31:36 crc kubenswrapper[4755]: I1124 01:31:36.098080 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/242276fc-0e87-4b68-b540-a0ea131df85a-config-data" (OuterVolumeSpecName: "config-data") pod "242276fc-0e87-4b68-b540-a0ea131df85a" (UID: "242276fc-0e87-4b68-b540-a0ea131df85a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:31:36 crc kubenswrapper[4755]: I1124 01:31:36.106305 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/242276fc-0e87-4b68-b540-a0ea131df85a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "242276fc-0e87-4b68-b540-a0ea131df85a" (UID: "242276fc-0e87-4b68-b540-a0ea131df85a"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:31:36 crc kubenswrapper[4755]: I1124 01:31:36.170662 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/242276fc-0e87-4b68-b540-a0ea131df85a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:31:36 crc kubenswrapper[4755]: I1124 01:31:36.170703 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4lqcl\" (UniqueName: \"kubernetes.io/projected/242276fc-0e87-4b68-b540-a0ea131df85a-kube-api-access-4lqcl\") on node \"crc\" DevicePath \"\"" Nov 24 01:31:36 crc kubenswrapper[4755]: I1124 01:31:36.170715 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/242276fc-0e87-4b68-b540-a0ea131df85a-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 01:31:36 crc kubenswrapper[4755]: I1124 01:31:36.378460 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 24 01:31:36 crc kubenswrapper[4755]: I1124 01:31:36.393408 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Nov 24 01:31:36 crc kubenswrapper[4755]: I1124 01:31:36.405941 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Nov 24 01:31:36 crc kubenswrapper[4755]: E1124 01:31:36.406627 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="242276fc-0e87-4b68-b540-a0ea131df85a" containerName="nova-scheduler-scheduler" Nov 24 01:31:36 crc kubenswrapper[4755]: I1124 01:31:36.406710 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="242276fc-0e87-4b68-b540-a0ea131df85a" containerName="nova-scheduler-scheduler" Nov 24 01:31:36 crc kubenswrapper[4755]: I1124 01:31:36.407008 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="242276fc-0e87-4b68-b540-a0ea131df85a" containerName="nova-scheduler-scheduler" Nov 24 01:31:36 crc kubenswrapper[4755]: I1124 01:31:36.407734 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Nov 24 01:31:36 crc kubenswrapper[4755]: I1124 01:31:36.410478 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Nov 24 01:31:36 crc kubenswrapper[4755]: I1124 01:31:36.416941 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 24 01:31:36 crc kubenswrapper[4755]: I1124 01:31:36.475202 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2bb0127-edec-4f95-a79b-b35b3607c968-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c2bb0127-edec-4f95-a79b-b35b3607c968\") " pod="openstack/nova-scheduler-0" Nov 24 01:31:36 crc kubenswrapper[4755]: I1124 01:31:36.475286 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2bb0127-edec-4f95-a79b-b35b3607c968-config-data\") pod \"nova-scheduler-0\" (UID: \"c2bb0127-edec-4f95-a79b-b35b3607c968\") " pod="openstack/nova-scheduler-0" Nov 24 01:31:36 crc kubenswrapper[4755]: I1124 01:31:36.475333 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b7j2z\" (UniqueName: \"kubernetes.io/projected/c2bb0127-edec-4f95-a79b-b35b3607c968-kube-api-access-b7j2z\") pod \"nova-scheduler-0\" (UID: \"c2bb0127-edec-4f95-a79b-b35b3607c968\") " pod="openstack/nova-scheduler-0" Nov 24 01:31:36 crc kubenswrapper[4755]: I1124 01:31:36.578337 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2bb0127-edec-4f95-a79b-b35b3607c968-config-data\") pod \"nova-scheduler-0\" (UID: \"c2bb0127-edec-4f95-a79b-b35b3607c968\") " pod="openstack/nova-scheduler-0" Nov 24 01:31:36 crc kubenswrapper[4755]: I1124 01:31:36.578445 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b7j2z\" (UniqueName: \"kubernetes.io/projected/c2bb0127-edec-4f95-a79b-b35b3607c968-kube-api-access-b7j2z\") pod \"nova-scheduler-0\" (UID: \"c2bb0127-edec-4f95-a79b-b35b3607c968\") " pod="openstack/nova-scheduler-0" Nov 24 01:31:36 crc kubenswrapper[4755]: I1124 01:31:36.578663 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2bb0127-edec-4f95-a79b-b35b3607c968-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c2bb0127-edec-4f95-a79b-b35b3607c968\") " pod="openstack/nova-scheduler-0" Nov 24 01:31:36 crc kubenswrapper[4755]: I1124 01:31:36.585786 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2bb0127-edec-4f95-a79b-b35b3607c968-config-data\") pod \"nova-scheduler-0\" (UID: \"c2bb0127-edec-4f95-a79b-b35b3607c968\") " pod="openstack/nova-scheduler-0" Nov 24 01:31:36 crc kubenswrapper[4755]: I1124 01:31:36.585804 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2bb0127-edec-4f95-a79b-b35b3607c968-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c2bb0127-edec-4f95-a79b-b35b3607c968\") " pod="openstack/nova-scheduler-0" Nov 24 01:31:36 crc kubenswrapper[4755]: I1124 01:31:36.598143 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b7j2z\" (UniqueName: 
\"kubernetes.io/projected/c2bb0127-edec-4f95-a79b-b35b3607c968-kube-api-access-b7j2z\") pod \"nova-scheduler-0\" (UID: \"c2bb0127-edec-4f95-a79b-b35b3607c968\") " pod="openstack/nova-scheduler-0" Nov 24 01:31:36 crc kubenswrapper[4755]: I1124 01:31:36.758287 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 24 01:31:37 crc kubenswrapper[4755]: I1124 01:31:37.203260 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 24 01:31:37 crc kubenswrapper[4755]: W1124 01:31:37.206059 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc2bb0127_edec_4f95_a79b_b35b3607c968.slice/crio-7e43d63770fc1648e8987e739add6076231ba396afb20c1a0125774a44d6a69d WatchSource:0}: Error finding container 7e43d63770fc1648e8987e739add6076231ba396afb20c1a0125774a44d6a69d: Status 404 returned error can't find the container with id 7e43d63770fc1648e8987e739add6076231ba396afb20c1a0125774a44d6a69d Nov 24 01:31:38 crc kubenswrapper[4755]: I1124 01:31:38.009785 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="242276fc-0e87-4b68-b540-a0ea131df85a" path="/var/lib/kubelet/pods/242276fc-0e87-4b68-b540-a0ea131df85a/volumes" Nov 24 01:31:38 crc kubenswrapper[4755]: I1124 01:31:38.058090 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c2bb0127-edec-4f95-a79b-b35b3607c968","Type":"ContainerStarted","Data":"b537c1bca22f7c8c74486bce1269b2170de8f1bdcb3bce11095745f5e3d842e6"} Nov 24 01:31:38 crc kubenswrapper[4755]: I1124 01:31:38.058132 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c2bb0127-edec-4f95-a79b-b35b3607c968","Type":"ContainerStarted","Data":"7e43d63770fc1648e8987e739add6076231ba396afb20c1a0125774a44d6a69d"} Nov 24 01:31:38 crc kubenswrapper[4755]: I1124 01:31:38.089396 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.089375783 podStartE2EDuration="2.089375783s" podCreationTimestamp="2025-11-24 01:31:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:31:38.083403175 +0000 UTC m=+1122.769468676" watchObservedRunningTime="2025-11-24 01:31:38.089375783 +0000 UTC m=+1122.775441304" Nov 24 01:31:39 crc kubenswrapper[4755]: I1124 01:31:39.385715 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 24 01:31:39 crc kubenswrapper[4755]: I1124 01:31:39.386024 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 24 01:31:41 crc kubenswrapper[4755]: I1124 01:31:41.627670 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 24 01:31:41 crc kubenswrapper[4755]: I1124 01:31:41.628018 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 24 01:31:41 crc kubenswrapper[4755]: I1124 01:31:41.758928 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Nov 24 01:31:42 crc kubenswrapper[4755]: I1124 01:31:42.642772 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="d237edf7-c0f4-4cfd-a1d2-fd6ac3f585b7" containerName="nova-api-api" probeResult="failure" output="Get 
\"https://10.217.0.203:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Nov 24 01:31:42 crc kubenswrapper[4755]: I1124 01:31:42.642845 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="d237edf7-c0f4-4cfd-a1d2-fd6ac3f585b7" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.203:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Nov 24 01:31:43 crc kubenswrapper[4755]: I1124 01:31:43.877516 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="cd53cbcf-1063-422f-a473-b97fff31b260" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Nov 24 01:31:44 crc kubenswrapper[4755]: I1124 01:31:44.384929 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 24 01:31:44 crc kubenswrapper[4755]: I1124 01:31:44.385082 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 24 01:31:45 crc kubenswrapper[4755]: I1124 01:31:45.396846 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="6db35847-3127-4ec0-b617-18e9c0f03f8a" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.204:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Nov 24 01:31:45 crc kubenswrapper[4755]: I1124 01:31:45.397702 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="6db35847-3127-4ec0-b617-18e9c0f03f8a" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.204:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Nov 24 01:31:46 crc kubenswrapper[4755]: I1124 01:31:46.759166 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Nov 24 01:31:46 crc kubenswrapper[4755]: I1124 01:31:46.804330 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Nov 24 01:31:47 crc kubenswrapper[4755]: I1124 01:31:47.174559 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Nov 24 01:31:48 crc kubenswrapper[4755]: I1124 01:31:48.156662 4755 generic.go:334] "Generic (PLEG): container finished" podID="cd53cbcf-1063-422f-a473-b97fff31b260" containerID="da83f87d6d1491e1c07fdd2e4b7ee45a655f446cfe00a49d772c5b3b1a306c59" exitCode=137 Nov 24 01:31:48 crc kubenswrapper[4755]: I1124 01:31:48.157876 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cd53cbcf-1063-422f-a473-b97fff31b260","Type":"ContainerDied","Data":"da83f87d6d1491e1c07fdd2e4b7ee45a655f446cfe00a49d772c5b3b1a306c59"} Nov 24 01:31:48 crc kubenswrapper[4755]: I1124 01:31:48.157915 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cd53cbcf-1063-422f-a473-b97fff31b260","Type":"ContainerDied","Data":"4bc384e44d671c5bd1cbdc604eb7e013c7908181c274aad9a2f653ee58f647a3"} Nov 24 01:31:48 crc kubenswrapper[4755]: I1124 01:31:48.157929 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4bc384e44d671c5bd1cbdc604eb7e013c7908181c274aad9a2f653ee58f647a3" Nov 24 01:31:48 crc kubenswrapper[4755]: I1124 01:31:48.198726 4755 util.go:48] "No ready sandbox for pod can be 
found. Need to start a new one" pod="openstack/ceilometer-0" Nov 24 01:31:48 crc kubenswrapper[4755]: I1124 01:31:48.241667 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd53cbcf-1063-422f-a473-b97fff31b260-config-data\") pod \"cd53cbcf-1063-422f-a473-b97fff31b260\" (UID: \"cd53cbcf-1063-422f-a473-b97fff31b260\") " Nov 24 01:31:48 crc kubenswrapper[4755]: I1124 01:31:48.241735 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cd53cbcf-1063-422f-a473-b97fff31b260-log-httpd\") pod \"cd53cbcf-1063-422f-a473-b97fff31b260\" (UID: \"cd53cbcf-1063-422f-a473-b97fff31b260\") " Nov 24 01:31:48 crc kubenswrapper[4755]: I1124 01:31:48.241797 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/cd53cbcf-1063-422f-a473-b97fff31b260-ceilometer-tls-certs\") pod \"cd53cbcf-1063-422f-a473-b97fff31b260\" (UID: \"cd53cbcf-1063-422f-a473-b97fff31b260\") " Nov 24 01:31:48 crc kubenswrapper[4755]: I1124 01:31:48.241840 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cd53cbcf-1063-422f-a473-b97fff31b260-sg-core-conf-yaml\") pod \"cd53cbcf-1063-422f-a473-b97fff31b260\" (UID: \"cd53cbcf-1063-422f-a473-b97fff31b260\") " Nov 24 01:31:48 crc kubenswrapper[4755]: I1124 01:31:48.241884 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cd53cbcf-1063-422f-a473-b97fff31b260-run-httpd\") pod \"cd53cbcf-1063-422f-a473-b97fff31b260\" (UID: \"cd53cbcf-1063-422f-a473-b97fff31b260\") " Nov 24 01:31:48 crc kubenswrapper[4755]: I1124 01:31:48.241966 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd53cbcf-1063-422f-a473-b97fff31b260-scripts\") pod \"cd53cbcf-1063-422f-a473-b97fff31b260\" (UID: \"cd53cbcf-1063-422f-a473-b97fff31b260\") " Nov 24 01:31:48 crc kubenswrapper[4755]: I1124 01:31:48.242082 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jtm6c\" (UniqueName: \"kubernetes.io/projected/cd53cbcf-1063-422f-a473-b97fff31b260-kube-api-access-jtm6c\") pod \"cd53cbcf-1063-422f-a473-b97fff31b260\" (UID: \"cd53cbcf-1063-422f-a473-b97fff31b260\") " Nov 24 01:31:48 crc kubenswrapper[4755]: I1124 01:31:48.242116 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cd53cbcf-1063-422f-a473-b97fff31b260-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "cd53cbcf-1063-422f-a473-b97fff31b260" (UID: "cd53cbcf-1063-422f-a473-b97fff31b260"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:31:48 crc kubenswrapper[4755]: I1124 01:31:48.242161 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd53cbcf-1063-422f-a473-b97fff31b260-combined-ca-bundle\") pod \"cd53cbcf-1063-422f-a473-b97fff31b260\" (UID: \"cd53cbcf-1063-422f-a473-b97fff31b260\") " Nov 24 01:31:48 crc kubenswrapper[4755]: I1124 01:31:48.242535 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cd53cbcf-1063-422f-a473-b97fff31b260-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "cd53cbcf-1063-422f-a473-b97fff31b260" (UID: "cd53cbcf-1063-422f-a473-b97fff31b260"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:31:48 crc kubenswrapper[4755]: I1124 01:31:48.242802 4755 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cd53cbcf-1063-422f-a473-b97fff31b260-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 24 01:31:48 crc kubenswrapper[4755]: I1124 01:31:48.242829 4755 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cd53cbcf-1063-422f-a473-b97fff31b260-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 24 01:31:48 crc kubenswrapper[4755]: I1124 01:31:48.255665 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd53cbcf-1063-422f-a473-b97fff31b260-kube-api-access-jtm6c" (OuterVolumeSpecName: "kube-api-access-jtm6c") pod "cd53cbcf-1063-422f-a473-b97fff31b260" (UID: "cd53cbcf-1063-422f-a473-b97fff31b260"). InnerVolumeSpecName "kube-api-access-jtm6c". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:31:48 crc kubenswrapper[4755]: I1124 01:31:48.280824 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd53cbcf-1063-422f-a473-b97fff31b260-scripts" (OuterVolumeSpecName: "scripts") pod "cd53cbcf-1063-422f-a473-b97fff31b260" (UID: "cd53cbcf-1063-422f-a473-b97fff31b260"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:31:48 crc kubenswrapper[4755]: I1124 01:31:48.297511 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd53cbcf-1063-422f-a473-b97fff31b260-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "cd53cbcf-1063-422f-a473-b97fff31b260" (UID: "cd53cbcf-1063-422f-a473-b97fff31b260"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:31:48 crc kubenswrapper[4755]: I1124 01:31:48.311083 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd53cbcf-1063-422f-a473-b97fff31b260-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "cd53cbcf-1063-422f-a473-b97fff31b260" (UID: "cd53cbcf-1063-422f-a473-b97fff31b260"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:31:48 crc kubenswrapper[4755]: I1124 01:31:48.327490 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd53cbcf-1063-422f-a473-b97fff31b260-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cd53cbcf-1063-422f-a473-b97fff31b260" (UID: "cd53cbcf-1063-422f-a473-b97fff31b260"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:31:48 crc kubenswrapper[4755]: I1124 01:31:48.346236 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd53cbcf-1063-422f-a473-b97fff31b260-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:31:48 crc kubenswrapper[4755]: I1124 01:31:48.346591 4755 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/cd53cbcf-1063-422f-a473-b97fff31b260-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 24 01:31:48 crc kubenswrapper[4755]: I1124 01:31:48.346750 4755 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cd53cbcf-1063-422f-a473-b97fff31b260-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 24 01:31:48 crc kubenswrapper[4755]: I1124 01:31:48.346854 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd53cbcf-1063-422f-a473-b97fff31b260-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 01:31:48 crc kubenswrapper[4755]: I1124 01:31:48.346982 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jtm6c\" (UniqueName: \"kubernetes.io/projected/cd53cbcf-1063-422f-a473-b97fff31b260-kube-api-access-jtm6c\") on node \"crc\" DevicePath \"\"" Nov 24 01:31:48 crc kubenswrapper[4755]: I1124 01:31:48.355062 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd53cbcf-1063-422f-a473-b97fff31b260-config-data" (OuterVolumeSpecName: "config-data") pod "cd53cbcf-1063-422f-a473-b97fff31b260" (UID: "cd53cbcf-1063-422f-a473-b97fff31b260"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:31:48 crc kubenswrapper[4755]: I1124 01:31:48.448564 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd53cbcf-1063-422f-a473-b97fff31b260-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 01:31:49 crc kubenswrapper[4755]: I1124 01:31:49.163759 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 24 01:31:49 crc kubenswrapper[4755]: I1124 01:31:49.195735 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 24 01:31:49 crc kubenswrapper[4755]: I1124 01:31:49.219564 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 24 01:31:49 crc kubenswrapper[4755]: I1124 01:31:49.232469 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 24 01:31:49 crc kubenswrapper[4755]: E1124 01:31:49.233225 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd53cbcf-1063-422f-a473-b97fff31b260" containerName="sg-core" Nov 24 01:31:49 crc kubenswrapper[4755]: I1124 01:31:49.233262 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd53cbcf-1063-422f-a473-b97fff31b260" containerName="sg-core" Nov 24 01:31:49 crc kubenswrapper[4755]: E1124 01:31:49.233305 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd53cbcf-1063-422f-a473-b97fff31b260" containerName="proxy-httpd" Nov 24 01:31:49 crc kubenswrapper[4755]: I1124 01:31:49.233317 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd53cbcf-1063-422f-a473-b97fff31b260" containerName="proxy-httpd" Nov 24 01:31:49 crc kubenswrapper[4755]: E1124 01:31:49.233359 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd53cbcf-1063-422f-a473-b97fff31b260" containerName="ceilometer-central-agent" Nov 24 01:31:49 crc kubenswrapper[4755]: I1124 01:31:49.233373 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd53cbcf-1063-422f-a473-b97fff31b260" containerName="ceilometer-central-agent" Nov 24 01:31:49 crc kubenswrapper[4755]: E1124 01:31:49.233393 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd53cbcf-1063-422f-a473-b97fff31b260" containerName="ceilometer-notification-agent" Nov 24 01:31:49 crc kubenswrapper[4755]: I1124 01:31:49.233408 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd53cbcf-1063-422f-a473-b97fff31b260" containerName="ceilometer-notification-agent" Nov 24 01:31:49 crc kubenswrapper[4755]: I1124 01:31:49.233806 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd53cbcf-1063-422f-a473-b97fff31b260" containerName="sg-core" Nov 24 01:31:49 crc kubenswrapper[4755]: I1124 01:31:49.233857 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd53cbcf-1063-422f-a473-b97fff31b260" containerName="ceilometer-notification-agent" Nov 24 01:31:49 crc kubenswrapper[4755]: I1124 01:31:49.233882 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd53cbcf-1063-422f-a473-b97fff31b260" containerName="proxy-httpd" Nov 24 01:31:49 crc kubenswrapper[4755]: I1124 01:31:49.233907 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd53cbcf-1063-422f-a473-b97fff31b260" containerName="ceilometer-central-agent" Nov 24 01:31:49 crc kubenswrapper[4755]: I1124 01:31:49.237794 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 24 01:31:49 crc kubenswrapper[4755]: I1124 01:31:49.238651 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 24 01:31:49 crc kubenswrapper[4755]: I1124 01:31:49.254301 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Nov 24 01:31:49 crc kubenswrapper[4755]: I1124 01:31:49.254504 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 24 01:31:49 crc kubenswrapper[4755]: I1124 01:31:49.254740 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 24 01:31:49 crc kubenswrapper[4755]: I1124 01:31:49.364025 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cbaee188-8eb8-461e-ba33-4abbb59c4ef7-run-httpd\") pod \"ceilometer-0\" (UID: \"cbaee188-8eb8-461e-ba33-4abbb59c4ef7\") " pod="openstack/ceilometer-0" Nov 24 01:31:49 crc kubenswrapper[4755]: I1124 01:31:49.364078 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cbaee188-8eb8-461e-ba33-4abbb59c4ef7-config-data\") pod \"ceilometer-0\" (UID: \"cbaee188-8eb8-461e-ba33-4abbb59c4ef7\") " pod="openstack/ceilometer-0" Nov 24 01:31:49 crc kubenswrapper[4755]: I1124 01:31:49.364111 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cbaee188-8eb8-461e-ba33-4abbb59c4ef7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cbaee188-8eb8-461e-ba33-4abbb59c4ef7\") " pod="openstack/ceilometer-0" Nov 24 01:31:49 crc kubenswrapper[4755]: I1124 01:31:49.364201 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cbaee188-8eb8-461e-ba33-4abbb59c4ef7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cbaee188-8eb8-461e-ba33-4abbb59c4ef7\") " pod="openstack/ceilometer-0" Nov 24 01:31:49 crc kubenswrapper[4755]: I1124 01:31:49.364239 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/cbaee188-8eb8-461e-ba33-4abbb59c4ef7-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"cbaee188-8eb8-461e-ba33-4abbb59c4ef7\") " pod="openstack/ceilometer-0" Nov 24 01:31:49 crc kubenswrapper[4755]: I1124 01:31:49.364270 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cbaee188-8eb8-461e-ba33-4abbb59c4ef7-log-httpd\") pod \"ceilometer-0\" (UID: \"cbaee188-8eb8-461e-ba33-4abbb59c4ef7\") " pod="openstack/ceilometer-0" Nov 24 01:31:49 crc kubenswrapper[4755]: I1124 01:31:49.364290 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hlz64\" (UniqueName: \"kubernetes.io/projected/cbaee188-8eb8-461e-ba33-4abbb59c4ef7-kube-api-access-hlz64\") pod \"ceilometer-0\" (UID: \"cbaee188-8eb8-461e-ba33-4abbb59c4ef7\") " pod="openstack/ceilometer-0" Nov 24 01:31:49 crc kubenswrapper[4755]: I1124 01:31:49.364344 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/cbaee188-8eb8-461e-ba33-4abbb59c4ef7-scripts\") pod \"ceilometer-0\" (UID: \"cbaee188-8eb8-461e-ba33-4abbb59c4ef7\") " pod="openstack/ceilometer-0" Nov 24 01:31:49 crc kubenswrapper[4755]: I1124 01:31:49.466281 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cbaee188-8eb8-461e-ba33-4abbb59c4ef7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cbaee188-8eb8-461e-ba33-4abbb59c4ef7\") " pod="openstack/ceilometer-0" Nov 24 01:31:49 crc kubenswrapper[4755]: I1124 01:31:49.466365 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/cbaee188-8eb8-461e-ba33-4abbb59c4ef7-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"cbaee188-8eb8-461e-ba33-4abbb59c4ef7\") " pod="openstack/ceilometer-0" Nov 24 01:31:49 crc kubenswrapper[4755]: I1124 01:31:49.466413 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cbaee188-8eb8-461e-ba33-4abbb59c4ef7-log-httpd\") pod \"ceilometer-0\" (UID: \"cbaee188-8eb8-461e-ba33-4abbb59c4ef7\") " pod="openstack/ceilometer-0" Nov 24 01:31:49 crc kubenswrapper[4755]: I1124 01:31:49.466478 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hlz64\" (UniqueName: \"kubernetes.io/projected/cbaee188-8eb8-461e-ba33-4abbb59c4ef7-kube-api-access-hlz64\") pod \"ceilometer-0\" (UID: \"cbaee188-8eb8-461e-ba33-4abbb59c4ef7\") " pod="openstack/ceilometer-0" Nov 24 01:31:49 crc kubenswrapper[4755]: I1124 01:31:49.466520 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cbaee188-8eb8-461e-ba33-4abbb59c4ef7-scripts\") pod \"ceilometer-0\" (UID: \"cbaee188-8eb8-461e-ba33-4abbb59c4ef7\") " pod="openstack/ceilometer-0" Nov 24 01:31:49 crc kubenswrapper[4755]: I1124 01:31:49.466568 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cbaee188-8eb8-461e-ba33-4abbb59c4ef7-run-httpd\") pod \"ceilometer-0\" (UID: \"cbaee188-8eb8-461e-ba33-4abbb59c4ef7\") " pod="openstack/ceilometer-0" Nov 24 01:31:49 crc kubenswrapper[4755]: I1124 01:31:49.466613 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cbaee188-8eb8-461e-ba33-4abbb59c4ef7-config-data\") pod \"ceilometer-0\" (UID: \"cbaee188-8eb8-461e-ba33-4abbb59c4ef7\") " pod="openstack/ceilometer-0" Nov 24 01:31:49 crc kubenswrapper[4755]: I1124 01:31:49.466648 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cbaee188-8eb8-461e-ba33-4abbb59c4ef7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cbaee188-8eb8-461e-ba33-4abbb59c4ef7\") " pod="openstack/ceilometer-0" Nov 24 01:31:49 crc kubenswrapper[4755]: I1124 01:31:49.467325 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cbaee188-8eb8-461e-ba33-4abbb59c4ef7-run-httpd\") pod \"ceilometer-0\" (UID: \"cbaee188-8eb8-461e-ba33-4abbb59c4ef7\") " pod="openstack/ceilometer-0" Nov 24 01:31:49 crc kubenswrapper[4755]: I1124 01:31:49.467665 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/cbaee188-8eb8-461e-ba33-4abbb59c4ef7-log-httpd\") pod \"ceilometer-0\" (UID: \"cbaee188-8eb8-461e-ba33-4abbb59c4ef7\") " pod="openstack/ceilometer-0" Nov 24 01:31:49 crc kubenswrapper[4755]: I1124 01:31:49.473200 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/cbaee188-8eb8-461e-ba33-4abbb59c4ef7-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"cbaee188-8eb8-461e-ba33-4abbb59c4ef7\") " pod="openstack/ceilometer-0" Nov 24 01:31:49 crc kubenswrapper[4755]: I1124 01:31:49.473337 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cbaee188-8eb8-461e-ba33-4abbb59c4ef7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cbaee188-8eb8-461e-ba33-4abbb59c4ef7\") " pod="openstack/ceilometer-0" Nov 24 01:31:49 crc kubenswrapper[4755]: I1124 01:31:49.474665 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cbaee188-8eb8-461e-ba33-4abbb59c4ef7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cbaee188-8eb8-461e-ba33-4abbb59c4ef7\") " pod="openstack/ceilometer-0" Nov 24 01:31:49 crc kubenswrapper[4755]: I1124 01:31:49.474690 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cbaee188-8eb8-461e-ba33-4abbb59c4ef7-scripts\") pod \"ceilometer-0\" (UID: \"cbaee188-8eb8-461e-ba33-4abbb59c4ef7\") " pod="openstack/ceilometer-0" Nov 24 01:31:49 crc kubenswrapper[4755]: I1124 01:31:49.475232 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cbaee188-8eb8-461e-ba33-4abbb59c4ef7-config-data\") pod \"ceilometer-0\" (UID: \"cbaee188-8eb8-461e-ba33-4abbb59c4ef7\") " pod="openstack/ceilometer-0" Nov 24 01:31:49 crc kubenswrapper[4755]: I1124 01:31:49.483441 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hlz64\" (UniqueName: \"kubernetes.io/projected/cbaee188-8eb8-461e-ba33-4abbb59c4ef7-kube-api-access-hlz64\") pod \"ceilometer-0\" (UID: \"cbaee188-8eb8-461e-ba33-4abbb59c4ef7\") " pod="openstack/ceilometer-0" Nov 24 01:31:49 crc kubenswrapper[4755]: I1124 01:31:49.579705 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 24 01:31:50 crc kubenswrapper[4755]: I1124 01:31:50.012221 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd53cbcf-1063-422f-a473-b97fff31b260" path="/var/lib/kubelet/pods/cd53cbcf-1063-422f-a473-b97fff31b260/volumes" Nov 24 01:31:50 crc kubenswrapper[4755]: I1124 01:31:50.063510 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 24 01:31:50 crc kubenswrapper[4755]: I1124 01:31:50.174152 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cbaee188-8eb8-461e-ba33-4abbb59c4ef7","Type":"ContainerStarted","Data":"0a4c4e062816430f72bf41fd88ed80c13fb442c47fbca33c201ffc61cc2be002"} Nov 24 01:31:51 crc kubenswrapper[4755]: I1124 01:31:51.186785 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cbaee188-8eb8-461e-ba33-4abbb59c4ef7","Type":"ContainerStarted","Data":"37dd03a4262fdb1f4a8ecc1b1d749618bccb225aa11b5479015b332d42bc32c6"} Nov 24 01:31:51 crc kubenswrapper[4755]: I1124 01:31:51.636340 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Nov 24 01:31:51 crc kubenswrapper[4755]: I1124 01:31:51.638303 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Nov 24 01:31:51 crc kubenswrapper[4755]: I1124 01:31:51.638340 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Nov 24 01:31:51 crc kubenswrapper[4755]: I1124 01:31:51.655847 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Nov 24 01:31:52 crc kubenswrapper[4755]: I1124 01:31:52.200764 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cbaee188-8eb8-461e-ba33-4abbb59c4ef7","Type":"ContainerStarted","Data":"cda5756e75b4b5a027d1737007e40c2c772ba838e7561c4734b02ee0169c0537"} Nov 24 01:31:52 crc kubenswrapper[4755]: I1124 01:31:52.201092 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Nov 24 01:31:52 crc kubenswrapper[4755]: I1124 01:31:52.201113 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cbaee188-8eb8-461e-ba33-4abbb59c4ef7","Type":"ContainerStarted","Data":"8218875f454cf0b232cac25074c28bdd6885a179768c74b9a5c8ad623ed4137f"} Nov 24 01:31:52 crc kubenswrapper[4755]: I1124 01:31:52.226171 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Nov 24 01:31:54 crc kubenswrapper[4755]: I1124 01:31:54.224279 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cbaee188-8eb8-461e-ba33-4abbb59c4ef7","Type":"ContainerStarted","Data":"8e6bdcd850eb58eddb561d91a92c784e00d7657dd26bf0d257db63a77f442858"} Nov 24 01:31:54 crc kubenswrapper[4755]: I1124 01:31:54.257335 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.949824298 podStartE2EDuration="5.257314701s" podCreationTimestamp="2025-11-24 01:31:49 +0000 UTC" firstStartedPulling="2025-11-24 01:31:50.055393841 +0000 UTC m=+1134.741459332" lastFinishedPulling="2025-11-24 01:31:53.362884224 +0000 UTC m=+1138.048949735" observedRunningTime="2025-11-24 01:31:54.248502233 +0000 UTC m=+1138.934567744" watchObservedRunningTime="2025-11-24 01:31:54.257314701 +0000 UTC m=+1138.943380222" Nov 24 01:31:54 crc 
kubenswrapper[4755]: I1124 01:31:54.391029 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Nov 24 01:31:54 crc kubenswrapper[4755]: I1124 01:31:54.392092 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Nov 24 01:31:54 crc kubenswrapper[4755]: I1124 01:31:54.395398 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Nov 24 01:31:55 crc kubenswrapper[4755]: I1124 01:31:55.241853 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 24 01:31:55 crc kubenswrapper[4755]: I1124 01:31:55.250084 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Nov 24 01:32:03 crc kubenswrapper[4755]: I1124 01:32:03.294683 4755 patch_prober.go:28] interesting pod/machine-config-daemon-h8xzm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 01:32:03 crc kubenswrapper[4755]: I1124 01:32:03.295241 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 01:32:19 crc kubenswrapper[4755]: I1124 01:32:19.591146 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Nov 24 01:32:29 crc kubenswrapper[4755]: I1124 01:32:29.340122 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Nov 24 01:32:30 crc kubenswrapper[4755]: I1124 01:32:30.187182 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 24 01:32:33 crc kubenswrapper[4755]: I1124 01:32:33.295423 4755 patch_prober.go:28] interesting pod/machine-config-daemon-h8xzm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 01:32:33 crc kubenswrapper[4755]: I1124 01:32:33.295775 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 01:32:33 crc kubenswrapper[4755]: I1124 01:32:33.295826 4755 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" Nov 24 01:32:33 crc kubenswrapper[4755]: I1124 01:32:33.296560 4755 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"77857823e666ac5615c021b67cb4fcc6f558c850cc69c4d388ccf77b95626fc7"} pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 24 01:32:33 crc kubenswrapper[4755]: I1124 01:32:33.296657 4755 kuberuntime_container.go:808] "Killing container with a 
grace period" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" containerID="cri-o://77857823e666ac5615c021b67cb4fcc6f558c850cc69c4d388ccf77b95626fc7" gracePeriod=600 Nov 24 01:32:33 crc kubenswrapper[4755]: I1124 01:32:33.636263 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed" containerName="rabbitmq" containerID="cri-o://a5f4b9f72d7febdbc07a67f9555604b9a20f551ec4c658e6634ffb48e5392147" gracePeriod=604796 Nov 24 01:32:33 crc kubenswrapper[4755]: I1124 01:32:33.698085 4755 generic.go:334] "Generic (PLEG): container finished" podID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerID="77857823e666ac5615c021b67cb4fcc6f558c850cc69c4d388ccf77b95626fc7" exitCode=0 Nov 24 01:32:33 crc kubenswrapper[4755]: I1124 01:32:33.698143 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" event={"ID":"b1962128-02a0-46c3-82c2-5055c2aed0b9","Type":"ContainerDied","Data":"77857823e666ac5615c021b67cb4fcc6f558c850cc69c4d388ccf77b95626fc7"} Nov 24 01:32:33 crc kubenswrapper[4755]: I1124 01:32:33.698395 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" event={"ID":"b1962128-02a0-46c3-82c2-5055c2aed0b9","Type":"ContainerStarted","Data":"6046690ccd6cfb15aff12bbb82767c5a83c9c84c292122438b2c2722b65a5466"} Nov 24 01:32:33 crc kubenswrapper[4755]: I1124 01:32:33.698416 4755 scope.go:117] "RemoveContainer" containerID="d1576ec75c38e5c634d28cddad8ee45995a487ee45005883f5a41207a6c2c9de" Nov 24 01:32:34 crc kubenswrapper[4755]: I1124 01:32:34.166669 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="20a66507-c5f4-43d2-a99b-18daaffea30f" containerName="rabbitmq" containerID="cri-o://27d007891e967c7fca63becaea7ed8b839367ea5c8b7443b2a527e82d2fbd53b" gracePeriod=604797 Nov 24 01:32:34 crc kubenswrapper[4755]: I1124 01:32:34.877963 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.100:5671: connect: connection refused" Nov 24 01:32:35 crc kubenswrapper[4755]: I1124 01:32:35.137484 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="20a66507-c5f4-43d2-a99b-18daaffea30f" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.101:5671: connect: connection refused" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.222482 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.314773 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-pod-info\") pod \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\" (UID: \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\") " Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.315113 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-rabbitmq-erlang-cookie\") pod \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\" (UID: \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\") " Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.315143 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-rabbitmq-tls\") pod \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\" (UID: \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\") " Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.315177 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-rabbitmq-plugins\") pod \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\" (UID: \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\") " Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.315222 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-erlang-cookie-secret\") pod \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\" (UID: \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\") " Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.315271 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\" (UID: \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\") " Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.315303 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-server-conf\") pod \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\" (UID: \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\") " Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.315357 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-config-data\") pod \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\" (UID: \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\") " Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.315385 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-plugins-conf\") pod \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\" (UID: \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\") " Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.315415 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5q2wb\" (UniqueName: \"kubernetes.io/projected/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-kube-api-access-5q2wb\") pod \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\" (UID: 
\"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\") " Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.315447 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-rabbitmq-confd\") pod \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\" (UID: \"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed\") " Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.317179 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed" (UID: "34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.317518 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed" (UID: "34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.318140 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed" (UID: "34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.322867 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed" (UID: "34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.324586 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "persistence") pod "34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed" (UID: "34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.324657 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed" (UID: "34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.327808 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-kube-api-access-5q2wb" (OuterVolumeSpecName: "kube-api-access-5q2wb") pod "34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed" (UID: "34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed"). InnerVolumeSpecName "kube-api-access-5q2wb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.379495 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-pod-info" (OuterVolumeSpecName: "pod-info") pod "34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed" (UID: "34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.382680 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-config-data" (OuterVolumeSpecName: "config-data") pod "34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed" (UID: "34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.395818 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-server-conf" (OuterVolumeSpecName: "server-conf") pod "34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed" (UID: "34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.418864 4755 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.418889 4755 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-server-conf\") on node \"crc\" DevicePath \"\"" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.418898 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.418907 4755 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-plugins-conf\") on node \"crc\" DevicePath \"\"" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.418916 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5q2wb\" (UniqueName: \"kubernetes.io/projected/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-kube-api-access-5q2wb\") on node \"crc\" DevicePath \"\"" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.419150 4755 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-pod-info\") on node \"crc\" DevicePath \"\"" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.419279 4755 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.419293 4755 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.419300 4755 
reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.419308 4755 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.455278 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed" (UID: "34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.477440 4755 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.520764 4755 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.520787 4755 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.679899 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.730407 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/20a66507-c5f4-43d2-a99b-18daaffea30f-rabbitmq-plugins\") pod \"20a66507-c5f4-43d2-a99b-18daaffea30f\" (UID: \"20a66507-c5f4-43d2-a99b-18daaffea30f\") " Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.730812 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/20a66507-c5f4-43d2-a99b-18daaffea30f-config-data\") pod \"20a66507-c5f4-43d2-a99b-18daaffea30f\" (UID: \"20a66507-c5f4-43d2-a99b-18daaffea30f\") " Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.730845 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/20a66507-c5f4-43d2-a99b-18daaffea30f-rabbitmq-erlang-cookie\") pod \"20a66507-c5f4-43d2-a99b-18daaffea30f\" (UID: \"20a66507-c5f4-43d2-a99b-18daaffea30f\") " Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.730935 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz5ws\" (UniqueName: \"kubernetes.io/projected/20a66507-c5f4-43d2-a99b-18daaffea30f-kube-api-access-lz5ws\") pod \"20a66507-c5f4-43d2-a99b-18daaffea30f\" (UID: \"20a66507-c5f4-43d2-a99b-18daaffea30f\") " Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.730980 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/20a66507-c5f4-43d2-a99b-18daaffea30f-rabbitmq-tls\") pod \"20a66507-c5f4-43d2-a99b-18daaffea30f\" (UID: \"20a66507-c5f4-43d2-a99b-18daaffea30f\") " Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.731037 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/20a66507-c5f4-43d2-a99b-18daaffea30f-rabbitmq-confd\") pod \"20a66507-c5f4-43d2-a99b-18daaffea30f\" (UID: \"20a66507-c5f4-43d2-a99b-18daaffea30f\") " Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.731111 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"20a66507-c5f4-43d2-a99b-18daaffea30f\" (UID: \"20a66507-c5f4-43d2-a99b-18daaffea30f\") " Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.731180 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/20a66507-c5f4-43d2-a99b-18daaffea30f-pod-info\") pod \"20a66507-c5f4-43d2-a99b-18daaffea30f\" (UID: \"20a66507-c5f4-43d2-a99b-18daaffea30f\") " Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.731210 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/20a66507-c5f4-43d2-a99b-18daaffea30f-server-conf\") pod \"20a66507-c5f4-43d2-a99b-18daaffea30f\" (UID: \"20a66507-c5f4-43d2-a99b-18daaffea30f\") " Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.731246 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/20a66507-c5f4-43d2-a99b-18daaffea30f-plugins-conf\") pod \"20a66507-c5f4-43d2-a99b-18daaffea30f\" (UID: 
\"20a66507-c5f4-43d2-a99b-18daaffea30f\") " Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.731286 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/20a66507-c5f4-43d2-a99b-18daaffea30f-erlang-cookie-secret\") pod \"20a66507-c5f4-43d2-a99b-18daaffea30f\" (UID: \"20a66507-c5f4-43d2-a99b-18daaffea30f\") " Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.735796 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20a66507-c5f4-43d2-a99b-18daaffea30f-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "20a66507-c5f4-43d2-a99b-18daaffea30f" (UID: "20a66507-c5f4-43d2-a99b-18daaffea30f"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.736107 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/20a66507-c5f4-43d2-a99b-18daaffea30f-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "20a66507-c5f4-43d2-a99b-18daaffea30f" (UID: "20a66507-c5f4-43d2-a99b-18daaffea30f"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.737802 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/20a66507-c5f4-43d2-a99b-18daaffea30f-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "20a66507-c5f4-43d2-a99b-18daaffea30f" (UID: "20a66507-c5f4-43d2-a99b-18daaffea30f"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.741211 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/20a66507-c5f4-43d2-a99b-18daaffea30f-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "20a66507-c5f4-43d2-a99b-18daaffea30f" (UID: "20a66507-c5f4-43d2-a99b-18daaffea30f"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.742517 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/20a66507-c5f4-43d2-a99b-18daaffea30f-pod-info" (OuterVolumeSpecName: "pod-info") pod "20a66507-c5f4-43d2-a99b-18daaffea30f" (UID: "20a66507-c5f4-43d2-a99b-18daaffea30f"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.749150 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20a66507-c5f4-43d2-a99b-18daaffea30f-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "20a66507-c5f4-43d2-a99b-18daaffea30f" (UID: "20a66507-c5f4-43d2-a99b-18daaffea30f"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.749993 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20a66507-c5f4-43d2-a99b-18daaffea30f-kube-api-access-lz5ws" (OuterVolumeSpecName: "kube-api-access-lz5ws") pod "20a66507-c5f4-43d2-a99b-18daaffea30f" (UID: "20a66507-c5f4-43d2-a99b-18daaffea30f"). InnerVolumeSpecName "kube-api-access-lz5ws". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.750822 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "persistence") pod "20a66507-c5f4-43d2-a99b-18daaffea30f" (UID: "20a66507-c5f4-43d2-a99b-18daaffea30f"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.781261 4755 generic.go:334] "Generic (PLEG): container finished" podID="20a66507-c5f4-43d2-a99b-18daaffea30f" containerID="27d007891e967c7fca63becaea7ed8b839367ea5c8b7443b2a527e82d2fbd53b" exitCode=0 Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.781332 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.781348 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"20a66507-c5f4-43d2-a99b-18daaffea30f","Type":"ContainerDied","Data":"27d007891e967c7fca63becaea7ed8b839367ea5c8b7443b2a527e82d2fbd53b"} Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.781377 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"20a66507-c5f4-43d2-a99b-18daaffea30f","Type":"ContainerDied","Data":"28f549736fa37cb8177eef2aa573ddffb24e3240a66e3c3f3bb3369c7fea986f"} Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.781395 4755 scope.go:117] "RemoveContainer" containerID="27d007891e967c7fca63becaea7ed8b839367ea5c8b7443b2a527e82d2fbd53b" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.789731 4755 generic.go:334] "Generic (PLEG): container finished" podID="34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed" containerID="a5f4b9f72d7febdbc07a67f9555604b9a20f551ec4c658e6634ffb48e5392147" exitCode=0 Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.789770 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed","Type":"ContainerDied","Data":"a5f4b9f72d7febdbc07a67f9555604b9a20f551ec4c658e6634ffb48e5392147"} Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.789798 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed","Type":"ContainerDied","Data":"74cde227ef5aca9f07445f6af776d7c21b58982666cbb772844715c08e07d776"} Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.789850 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.796664 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/20a66507-c5f4-43d2-a99b-18daaffea30f-config-data" (OuterVolumeSpecName: "config-data") pod "20a66507-c5f4-43d2-a99b-18daaffea30f" (UID: "20a66507-c5f4-43d2-a99b-18daaffea30f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.809550 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/20a66507-c5f4-43d2-a99b-18daaffea30f-server-conf" (OuterVolumeSpecName: "server-conf") pod "20a66507-c5f4-43d2-a99b-18daaffea30f" (UID: "20a66507-c5f4-43d2-a99b-18daaffea30f"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.814020 4755 scope.go:117] "RemoveContainer" containerID="01167809120deee76264fb76f66c01e755d3143e759623ecc51641de11bb6563" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.832566 4755 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/20a66507-c5f4-43d2-a99b-18daaffea30f-pod-info\") on node \"crc\" DevicePath \"\"" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.832619 4755 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/20a66507-c5f4-43d2-a99b-18daaffea30f-server-conf\") on node \"crc\" DevicePath \"\"" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.832631 4755 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/20a66507-c5f4-43d2-a99b-18daaffea30f-plugins-conf\") on node \"crc\" DevicePath \"\"" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.832641 4755 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/20a66507-c5f4-43d2-a99b-18daaffea30f-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.832650 4755 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/20a66507-c5f4-43d2-a99b-18daaffea30f-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.832658 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/20a66507-c5f4-43d2-a99b-18daaffea30f-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.832666 4755 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/20a66507-c5f4-43d2-a99b-18daaffea30f-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.832675 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz5ws\" (UniqueName: \"kubernetes.io/projected/20a66507-c5f4-43d2-a99b-18daaffea30f-kube-api-access-lz5ws\") on node \"crc\" DevicePath \"\"" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.832682 4755 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/20a66507-c5f4-43d2-a99b-18daaffea30f-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.832709 4755 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.836648 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.873739 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.875400 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20a66507-c5f4-43d2-a99b-18daaffea30f-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "20a66507-c5f4-43d2-a99b-18daaffea30f" (UID: 
"20a66507-c5f4-43d2-a99b-18daaffea30f"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.886489 4755 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.899913 4755 scope.go:117] "RemoveContainer" containerID="27d007891e967c7fca63becaea7ed8b839367ea5c8b7443b2a527e82d2fbd53b" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.917384 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Nov 24 01:32:40 crc kubenswrapper[4755]: E1124 01:32:40.917817 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"27d007891e967c7fca63becaea7ed8b839367ea5c8b7443b2a527e82d2fbd53b\": container with ID starting with 27d007891e967c7fca63becaea7ed8b839367ea5c8b7443b2a527e82d2fbd53b not found: ID does not exist" containerID="27d007891e967c7fca63becaea7ed8b839367ea5c8b7443b2a527e82d2fbd53b" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.917851 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"27d007891e967c7fca63becaea7ed8b839367ea5c8b7443b2a527e82d2fbd53b"} err="failed to get container status \"27d007891e967c7fca63becaea7ed8b839367ea5c8b7443b2a527e82d2fbd53b\": rpc error: code = NotFound desc = could not find container \"27d007891e967c7fca63becaea7ed8b839367ea5c8b7443b2a527e82d2fbd53b\": container with ID starting with 27d007891e967c7fca63becaea7ed8b839367ea5c8b7443b2a527e82d2fbd53b not found: ID does not exist" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.917874 4755 scope.go:117] "RemoveContainer" containerID="01167809120deee76264fb76f66c01e755d3143e759623ecc51641de11bb6563" Nov 24 01:32:40 crc kubenswrapper[4755]: E1124 01:32:40.918191 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"01167809120deee76264fb76f66c01e755d3143e759623ecc51641de11bb6563\": container with ID starting with 01167809120deee76264fb76f66c01e755d3143e759623ecc51641de11bb6563 not found: ID does not exist" containerID="01167809120deee76264fb76f66c01e755d3143e759623ecc51641de11bb6563" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.918215 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"01167809120deee76264fb76f66c01e755d3143e759623ecc51641de11bb6563"} err="failed to get container status \"01167809120deee76264fb76f66c01e755d3143e759623ecc51641de11bb6563\": rpc error: code = NotFound desc = could not find container \"01167809120deee76264fb76f66c01e755d3143e759623ecc51641de11bb6563\": container with ID starting with 01167809120deee76264fb76f66c01e755d3143e759623ecc51641de11bb6563 not found: ID does not exist" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.918227 4755 scope.go:117] "RemoveContainer" containerID="a5f4b9f72d7febdbc07a67f9555604b9a20f551ec4c658e6634ffb48e5392147" Nov 24 01:32:40 crc kubenswrapper[4755]: E1124 01:32:40.918312 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20a66507-c5f4-43d2-a99b-18daaffea30f" containerName="rabbitmq" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.918323 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="20a66507-c5f4-43d2-a99b-18daaffea30f" containerName="rabbitmq" Nov 
24 01:32:40 crc kubenswrapper[4755]: E1124 01:32:40.918340 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed" containerName="setup-container" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.918348 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed" containerName="setup-container" Nov 24 01:32:40 crc kubenswrapper[4755]: E1124 01:32:40.918359 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed" containerName="rabbitmq" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.918365 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed" containerName="rabbitmq" Nov 24 01:32:40 crc kubenswrapper[4755]: E1124 01:32:40.918379 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20a66507-c5f4-43d2-a99b-18daaffea30f" containerName="setup-container" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.918385 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="20a66507-c5f4-43d2-a99b-18daaffea30f" containerName="setup-container" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.918963 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed" containerName="rabbitmq" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.918976 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="20a66507-c5f4-43d2-a99b-18daaffea30f" containerName="rabbitmq" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.919939 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.922233 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.927352 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.927590 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.927754 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.928137 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.928254 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-g5576" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.928482 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.929026 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.936541 4755 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/20a66507-c5f4-43d2-a99b-18daaffea30f-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.937643 4755 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node 
\"crc\" DevicePath \"\"" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.977491 4755 scope.go:117] "RemoveContainer" containerID="5158b16b766a448d141bf8a6f42a136b467e9d76431b7b8f0bf4a3de684c35ca" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.999244 4755 scope.go:117] "RemoveContainer" containerID="a5f4b9f72d7febdbc07a67f9555604b9a20f551ec4c658e6634ffb48e5392147" Nov 24 01:32:40 crc kubenswrapper[4755]: E1124 01:32:40.999631 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a5f4b9f72d7febdbc07a67f9555604b9a20f551ec4c658e6634ffb48e5392147\": container with ID starting with a5f4b9f72d7febdbc07a67f9555604b9a20f551ec4c658e6634ffb48e5392147 not found: ID does not exist" containerID="a5f4b9f72d7febdbc07a67f9555604b9a20f551ec4c658e6634ffb48e5392147" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.999658 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5f4b9f72d7febdbc07a67f9555604b9a20f551ec4c658e6634ffb48e5392147"} err="failed to get container status \"a5f4b9f72d7febdbc07a67f9555604b9a20f551ec4c658e6634ffb48e5392147\": rpc error: code = NotFound desc = could not find container \"a5f4b9f72d7febdbc07a67f9555604b9a20f551ec4c658e6634ffb48e5392147\": container with ID starting with a5f4b9f72d7febdbc07a67f9555604b9a20f551ec4c658e6634ffb48e5392147 not found: ID does not exist" Nov 24 01:32:40 crc kubenswrapper[4755]: I1124 01:32:40.999677 4755 scope.go:117] "RemoveContainer" containerID="5158b16b766a448d141bf8a6f42a136b467e9d76431b7b8f0bf4a3de684c35ca" Nov 24 01:32:41 crc kubenswrapper[4755]: E1124 01:32:40.999983 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5158b16b766a448d141bf8a6f42a136b467e9d76431b7b8f0bf4a3de684c35ca\": container with ID starting with 5158b16b766a448d141bf8a6f42a136b467e9d76431b7b8f0bf4a3de684c35ca not found: ID does not exist" containerID="5158b16b766a448d141bf8a6f42a136b467e9d76431b7b8f0bf4a3de684c35ca" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.000031 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5158b16b766a448d141bf8a6f42a136b467e9d76431b7b8f0bf4a3de684c35ca"} err="failed to get container status \"5158b16b766a448d141bf8a6f42a136b467e9d76431b7b8f0bf4a3de684c35ca\": rpc error: code = NotFound desc = could not find container \"5158b16b766a448d141bf8a6f42a136b467e9d76431b7b8f0bf4a3de684c35ca\": container with ID starting with 5158b16b766a448d141bf8a6f42a136b467e9d76431b7b8f0bf4a3de684c35ca not found: ID does not exist" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.039508 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/47ce72c9-6a27-44ff-80ed-d844fa0f1d2e-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"47ce72c9-6a27-44ff-80ed-d844fa0f1d2e\") " pod="openstack/rabbitmq-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.039584 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/47ce72c9-6a27-44ff-80ed-d844fa0f1d2e-config-data\") pod \"rabbitmq-server-0\" (UID: \"47ce72c9-6a27-44ff-80ed-d844fa0f1d2e\") " pod="openstack/rabbitmq-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.039624 4755 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"47ce72c9-6a27-44ff-80ed-d844fa0f1d2e\") " pod="openstack/rabbitmq-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.039723 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-27b9b\" (UniqueName: \"kubernetes.io/projected/47ce72c9-6a27-44ff-80ed-d844fa0f1d2e-kube-api-access-27b9b\") pod \"rabbitmq-server-0\" (UID: \"47ce72c9-6a27-44ff-80ed-d844fa0f1d2e\") " pod="openstack/rabbitmq-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.039768 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/47ce72c9-6a27-44ff-80ed-d844fa0f1d2e-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"47ce72c9-6a27-44ff-80ed-d844fa0f1d2e\") " pod="openstack/rabbitmq-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.039811 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/47ce72c9-6a27-44ff-80ed-d844fa0f1d2e-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"47ce72c9-6a27-44ff-80ed-d844fa0f1d2e\") " pod="openstack/rabbitmq-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.039896 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/47ce72c9-6a27-44ff-80ed-d844fa0f1d2e-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"47ce72c9-6a27-44ff-80ed-d844fa0f1d2e\") " pod="openstack/rabbitmq-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.040031 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/47ce72c9-6a27-44ff-80ed-d844fa0f1d2e-pod-info\") pod \"rabbitmq-server-0\" (UID: \"47ce72c9-6a27-44ff-80ed-d844fa0f1d2e\") " pod="openstack/rabbitmq-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.040109 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/47ce72c9-6a27-44ff-80ed-d844fa0f1d2e-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"47ce72c9-6a27-44ff-80ed-d844fa0f1d2e\") " pod="openstack/rabbitmq-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.040176 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/47ce72c9-6a27-44ff-80ed-d844fa0f1d2e-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"47ce72c9-6a27-44ff-80ed-d844fa0f1d2e\") " pod="openstack/rabbitmq-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.040227 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/47ce72c9-6a27-44ff-80ed-d844fa0f1d2e-server-conf\") pod \"rabbitmq-server-0\" (UID: \"47ce72c9-6a27-44ff-80ed-d844fa0f1d2e\") " pod="openstack/rabbitmq-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.125782 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 24 01:32:41 crc 
kubenswrapper[4755]: I1124 01:32:41.135793 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.142303 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/47ce72c9-6a27-44ff-80ed-d844fa0f1d2e-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"47ce72c9-6a27-44ff-80ed-d844fa0f1d2e\") " pod="openstack/rabbitmq-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.142385 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/47ce72c9-6a27-44ff-80ed-d844fa0f1d2e-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"47ce72c9-6a27-44ff-80ed-d844fa0f1d2e\") " pod="openstack/rabbitmq-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.142477 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/47ce72c9-6a27-44ff-80ed-d844fa0f1d2e-pod-info\") pod \"rabbitmq-server-0\" (UID: \"47ce72c9-6a27-44ff-80ed-d844fa0f1d2e\") " pod="openstack/rabbitmq-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.143331 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/47ce72c9-6a27-44ff-80ed-d844fa0f1d2e-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"47ce72c9-6a27-44ff-80ed-d844fa0f1d2e\") " pod="openstack/rabbitmq-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.143364 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/47ce72c9-6a27-44ff-80ed-d844fa0f1d2e-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"47ce72c9-6a27-44ff-80ed-d844fa0f1d2e\") " pod="openstack/rabbitmq-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.143402 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/47ce72c9-6a27-44ff-80ed-d844fa0f1d2e-server-conf\") pod \"rabbitmq-server-0\" (UID: \"47ce72c9-6a27-44ff-80ed-d844fa0f1d2e\") " pod="openstack/rabbitmq-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.143408 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/47ce72c9-6a27-44ff-80ed-d844fa0f1d2e-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"47ce72c9-6a27-44ff-80ed-d844fa0f1d2e\") " pod="openstack/rabbitmq-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.143557 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/47ce72c9-6a27-44ff-80ed-d844fa0f1d2e-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"47ce72c9-6a27-44ff-80ed-d844fa0f1d2e\") " pod="openstack/rabbitmq-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.143755 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/47ce72c9-6a27-44ff-80ed-d844fa0f1d2e-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"47ce72c9-6a27-44ff-80ed-d844fa0f1d2e\") " pod="openstack/rabbitmq-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.143786 4755 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/47ce72c9-6a27-44ff-80ed-d844fa0f1d2e-config-data\") pod \"rabbitmq-server-0\" (UID: \"47ce72c9-6a27-44ff-80ed-d844fa0f1d2e\") " pod="openstack/rabbitmq-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.143853 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"47ce72c9-6a27-44ff-80ed-d844fa0f1d2e\") " pod="openstack/rabbitmq-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.143939 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-27b9b\" (UniqueName: \"kubernetes.io/projected/47ce72c9-6a27-44ff-80ed-d844fa0f1d2e-kube-api-access-27b9b\") pod \"rabbitmq-server-0\" (UID: \"47ce72c9-6a27-44ff-80ed-d844fa0f1d2e\") " pod="openstack/rabbitmq-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.144009 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/47ce72c9-6a27-44ff-80ed-d844fa0f1d2e-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"47ce72c9-6a27-44ff-80ed-d844fa0f1d2e\") " pod="openstack/rabbitmq-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.144540 4755 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"47ce72c9-6a27-44ff-80ed-d844fa0f1d2e\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/rabbitmq-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.145046 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/47ce72c9-6a27-44ff-80ed-d844fa0f1d2e-config-data\") pod \"rabbitmq-server-0\" (UID: \"47ce72c9-6a27-44ff-80ed-d844fa0f1d2e\") " pod="openstack/rabbitmq-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.145567 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/47ce72c9-6a27-44ff-80ed-d844fa0f1d2e-server-conf\") pod \"rabbitmq-server-0\" (UID: \"47ce72c9-6a27-44ff-80ed-d844fa0f1d2e\") " pod="openstack/rabbitmq-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.145942 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/47ce72c9-6a27-44ff-80ed-d844fa0f1d2e-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"47ce72c9-6a27-44ff-80ed-d844fa0f1d2e\") " pod="openstack/rabbitmq-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.147246 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/47ce72c9-6a27-44ff-80ed-d844fa0f1d2e-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"47ce72c9-6a27-44ff-80ed-d844fa0f1d2e\") " pod="openstack/rabbitmq-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.153468 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/47ce72c9-6a27-44ff-80ed-d844fa0f1d2e-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"47ce72c9-6a27-44ff-80ed-d844fa0f1d2e\") " pod="openstack/rabbitmq-server-0" Nov 24 01:32:41 crc 
kubenswrapper[4755]: I1124 01:32:41.157031 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/47ce72c9-6a27-44ff-80ed-d844fa0f1d2e-pod-info\") pod \"rabbitmq-server-0\" (UID: \"47ce72c9-6a27-44ff-80ed-d844fa0f1d2e\") " pod="openstack/rabbitmq-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.168044 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/47ce72c9-6a27-44ff-80ed-d844fa0f1d2e-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"47ce72c9-6a27-44ff-80ed-d844fa0f1d2e\") " pod="openstack/rabbitmq-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.174655 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.177374 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.179317 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-27b9b\" (UniqueName: \"kubernetes.io/projected/47ce72c9-6a27-44ff-80ed-d844fa0f1d2e-kube-api-access-27b9b\") pod \"rabbitmq-server-0\" (UID: \"47ce72c9-6a27-44ff-80ed-d844fa0f1d2e\") " pod="openstack/rabbitmq-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.182111 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.182441 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.182672 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.182710 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.182949 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.183078 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.183143 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-dv59m" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.183177 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.188933 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"47ce72c9-6a27-44ff-80ed-d844fa0f1d2e\") " pod="openstack/rabbitmq-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.278180 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.350275 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ba8cbfc3-74a5-4ea6-bd18-8fcab5462623-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba8cbfc3-74a5-4ea6-bd18-8fcab5462623\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.350465 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ba8cbfc3-74a5-4ea6-bd18-8fcab5462623-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba8cbfc3-74a5-4ea6-bd18-8fcab5462623\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.350540 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7999d\" (UniqueName: \"kubernetes.io/projected/ba8cbfc3-74a5-4ea6-bd18-8fcab5462623-kube-api-access-7999d\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba8cbfc3-74a5-4ea6-bd18-8fcab5462623\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.350845 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ba8cbfc3-74a5-4ea6-bd18-8fcab5462623-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba8cbfc3-74a5-4ea6-bd18-8fcab5462623\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.351106 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ba8cbfc3-74a5-4ea6-bd18-8fcab5462623-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba8cbfc3-74a5-4ea6-bd18-8fcab5462623\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.351343 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ba8cbfc3-74a5-4ea6-bd18-8fcab5462623-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba8cbfc3-74a5-4ea6-bd18-8fcab5462623\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.351486 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ba8cbfc3-74a5-4ea6-bd18-8fcab5462623-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba8cbfc3-74a5-4ea6-bd18-8fcab5462623\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.351722 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba8cbfc3-74a5-4ea6-bd18-8fcab5462623\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.352047 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ba8cbfc3-74a5-4ea6-bd18-8fcab5462623-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"ba8cbfc3-74a5-4ea6-bd18-8fcab5462623\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.352169 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ba8cbfc3-74a5-4ea6-bd18-8fcab5462623-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba8cbfc3-74a5-4ea6-bd18-8fcab5462623\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.352204 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ba8cbfc3-74a5-4ea6-bd18-8fcab5462623-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba8cbfc3-74a5-4ea6-bd18-8fcab5462623\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.455167 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ba8cbfc3-74a5-4ea6-bd18-8fcab5462623-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba8cbfc3-74a5-4ea6-bd18-8fcab5462623\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.455416 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba8cbfc3-74a5-4ea6-bd18-8fcab5462623\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.455443 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ba8cbfc3-74a5-4ea6-bd18-8fcab5462623-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba8cbfc3-74a5-4ea6-bd18-8fcab5462623\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.455460 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ba8cbfc3-74a5-4ea6-bd18-8fcab5462623-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba8cbfc3-74a5-4ea6-bd18-8fcab5462623\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.455473 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ba8cbfc3-74a5-4ea6-bd18-8fcab5462623-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba8cbfc3-74a5-4ea6-bd18-8fcab5462623\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.455490 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ba8cbfc3-74a5-4ea6-bd18-8fcab5462623-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba8cbfc3-74a5-4ea6-bd18-8fcab5462623\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.455522 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ba8cbfc3-74a5-4ea6-bd18-8fcab5462623-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba8cbfc3-74a5-4ea6-bd18-8fcab5462623\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 
01:32:41.455542 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7999d\" (UniqueName: \"kubernetes.io/projected/ba8cbfc3-74a5-4ea6-bd18-8fcab5462623-kube-api-access-7999d\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba8cbfc3-74a5-4ea6-bd18-8fcab5462623\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.455570 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ba8cbfc3-74a5-4ea6-bd18-8fcab5462623-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba8cbfc3-74a5-4ea6-bd18-8fcab5462623\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.455636 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ba8cbfc3-74a5-4ea6-bd18-8fcab5462623-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba8cbfc3-74a5-4ea6-bd18-8fcab5462623\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.455680 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ba8cbfc3-74a5-4ea6-bd18-8fcab5462623-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba8cbfc3-74a5-4ea6-bd18-8fcab5462623\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.456122 4755 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba8cbfc3-74a5-4ea6-bd18-8fcab5462623\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.456172 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ba8cbfc3-74a5-4ea6-bd18-8fcab5462623-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba8cbfc3-74a5-4ea6-bd18-8fcab5462623\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.456550 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ba8cbfc3-74a5-4ea6-bd18-8fcab5462623-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba8cbfc3-74a5-4ea6-bd18-8fcab5462623\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.456772 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ba8cbfc3-74a5-4ea6-bd18-8fcab5462623-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba8cbfc3-74a5-4ea6-bd18-8fcab5462623\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.468853 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ba8cbfc3-74a5-4ea6-bd18-8fcab5462623-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba8cbfc3-74a5-4ea6-bd18-8fcab5462623\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.469663 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: 
\"kubernetes.io/secret/ba8cbfc3-74a5-4ea6-bd18-8fcab5462623-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba8cbfc3-74a5-4ea6-bd18-8fcab5462623\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.469681 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ba8cbfc3-74a5-4ea6-bd18-8fcab5462623-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba8cbfc3-74a5-4ea6-bd18-8fcab5462623\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.469824 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ba8cbfc3-74a5-4ea6-bd18-8fcab5462623-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba8cbfc3-74a5-4ea6-bd18-8fcab5462623\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.470428 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ba8cbfc3-74a5-4ea6-bd18-8fcab5462623-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba8cbfc3-74a5-4ea6-bd18-8fcab5462623\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.477937 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7999d\" (UniqueName: \"kubernetes.io/projected/ba8cbfc3-74a5-4ea6-bd18-8fcab5462623-kube-api-access-7999d\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba8cbfc3-74a5-4ea6-bd18-8fcab5462623\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.478632 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ba8cbfc3-74a5-4ea6-bd18-8fcab5462623-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba8cbfc3-74a5-4ea6-bd18-8fcab5462623\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.521174 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"ba8cbfc3-74a5-4ea6-bd18-8fcab5462623\") " pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.560557 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.756421 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Nov 24 01:32:41 crc kubenswrapper[4755]: I1124 01:32:41.800019 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"47ce72c9-6a27-44ff-80ed-d844fa0f1d2e","Type":"ContainerStarted","Data":"b4f0a2747fb28dcadc992fc63711bfc54edd406546226c480b87a35ca61f9d0d"} Nov 24 01:32:42 crc kubenswrapper[4755]: I1124 01:32:42.008082 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20a66507-c5f4-43d2-a99b-18daaffea30f" path="/var/lib/kubelet/pods/20a66507-c5f4-43d2-a99b-18daaffea30f/volumes" Nov 24 01:32:42 crc kubenswrapper[4755]: I1124 01:32:42.008906 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed" path="/var/lib/kubelet/pods/34cdc0e3-2bc4-4fd0-ae04-e549ef0ceeed/volumes" Nov 24 01:32:42 crc kubenswrapper[4755]: W1124 01:32:42.014056 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podba8cbfc3_74a5_4ea6_bd18_8fcab5462623.slice/crio-9f3ea74bdc8c1dfdad61b9346ba6e268f01c442ca101a7689a88ba88da92dbc6 WatchSource:0}: Error finding container 9f3ea74bdc8c1dfdad61b9346ba6e268f01c442ca101a7689a88ba88da92dbc6: Status 404 returned error can't find the container with id 9f3ea74bdc8c1dfdad61b9346ba6e268f01c442ca101a7689a88ba88da92dbc6 Nov 24 01:32:42 crc kubenswrapper[4755]: I1124 01:32:42.016026 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 24 01:32:42 crc kubenswrapper[4755]: I1124 01:32:42.631518 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-7857q"] Nov 24 01:32:42 crc kubenswrapper[4755]: I1124 01:32:42.634275 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-7857q" Nov 24 01:32:42 crc kubenswrapper[4755]: I1124 01:32:42.636837 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Nov 24 01:32:42 crc kubenswrapper[4755]: I1124 01:32:42.649766 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-7857q"] Nov 24 01:32:42 crc kubenswrapper[4755]: I1124 01:32:42.780411 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/40269c4d-1151-4a12-972b-bbc38662d04e-dns-swift-storage-0\") pod \"dnsmasq-dns-67b789f86c-7857q\" (UID: \"40269c4d-1151-4a12-972b-bbc38662d04e\") " pod="openstack/dnsmasq-dns-67b789f86c-7857q" Nov 24 01:32:42 crc kubenswrapper[4755]: I1124 01:32:42.781060 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40269c4d-1151-4a12-972b-bbc38662d04e-config\") pod \"dnsmasq-dns-67b789f86c-7857q\" (UID: \"40269c4d-1151-4a12-972b-bbc38662d04e\") " pod="openstack/dnsmasq-dns-67b789f86c-7857q" Nov 24 01:32:42 crc kubenswrapper[4755]: I1124 01:32:42.781102 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/40269c4d-1151-4a12-972b-bbc38662d04e-dns-svc\") pod \"dnsmasq-dns-67b789f86c-7857q\" (UID: \"40269c4d-1151-4a12-972b-bbc38662d04e\") " pod="openstack/dnsmasq-dns-67b789f86c-7857q" Nov 24 01:32:42 crc kubenswrapper[4755]: I1124 01:32:42.781146 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/40269c4d-1151-4a12-972b-bbc38662d04e-openstack-edpm-ipam\") pod \"dnsmasq-dns-67b789f86c-7857q\" (UID: \"40269c4d-1151-4a12-972b-bbc38662d04e\") " pod="openstack/dnsmasq-dns-67b789f86c-7857q" Nov 24 01:32:42 crc kubenswrapper[4755]: I1124 01:32:42.781218 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/40269c4d-1151-4a12-972b-bbc38662d04e-ovsdbserver-sb\") pod \"dnsmasq-dns-67b789f86c-7857q\" (UID: \"40269c4d-1151-4a12-972b-bbc38662d04e\") " pod="openstack/dnsmasq-dns-67b789f86c-7857q" Nov 24 01:32:42 crc kubenswrapper[4755]: I1124 01:32:42.781309 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bd4cf\" (UniqueName: \"kubernetes.io/projected/40269c4d-1151-4a12-972b-bbc38662d04e-kube-api-access-bd4cf\") pod \"dnsmasq-dns-67b789f86c-7857q\" (UID: \"40269c4d-1151-4a12-972b-bbc38662d04e\") " pod="openstack/dnsmasq-dns-67b789f86c-7857q" Nov 24 01:32:42 crc kubenswrapper[4755]: I1124 01:32:42.781433 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/40269c4d-1151-4a12-972b-bbc38662d04e-ovsdbserver-nb\") pod \"dnsmasq-dns-67b789f86c-7857q\" (UID: \"40269c4d-1151-4a12-972b-bbc38662d04e\") " pod="openstack/dnsmasq-dns-67b789f86c-7857q" Nov 24 01:32:42 crc kubenswrapper[4755]: I1124 01:32:42.812741 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" 
event={"ID":"ba8cbfc3-74a5-4ea6-bd18-8fcab5462623","Type":"ContainerStarted","Data":"9f3ea74bdc8c1dfdad61b9346ba6e268f01c442ca101a7689a88ba88da92dbc6"} Nov 24 01:32:42 crc kubenswrapper[4755]: I1124 01:32:42.882784 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/40269c4d-1151-4a12-972b-bbc38662d04e-dns-swift-storage-0\") pod \"dnsmasq-dns-67b789f86c-7857q\" (UID: \"40269c4d-1151-4a12-972b-bbc38662d04e\") " pod="openstack/dnsmasq-dns-67b789f86c-7857q" Nov 24 01:32:42 crc kubenswrapper[4755]: I1124 01:32:42.882850 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40269c4d-1151-4a12-972b-bbc38662d04e-config\") pod \"dnsmasq-dns-67b789f86c-7857q\" (UID: \"40269c4d-1151-4a12-972b-bbc38662d04e\") " pod="openstack/dnsmasq-dns-67b789f86c-7857q" Nov 24 01:32:42 crc kubenswrapper[4755]: I1124 01:32:42.882887 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/40269c4d-1151-4a12-972b-bbc38662d04e-dns-svc\") pod \"dnsmasq-dns-67b789f86c-7857q\" (UID: \"40269c4d-1151-4a12-972b-bbc38662d04e\") " pod="openstack/dnsmasq-dns-67b789f86c-7857q" Nov 24 01:32:42 crc kubenswrapper[4755]: I1124 01:32:42.882909 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/40269c4d-1151-4a12-972b-bbc38662d04e-openstack-edpm-ipam\") pod \"dnsmasq-dns-67b789f86c-7857q\" (UID: \"40269c4d-1151-4a12-972b-bbc38662d04e\") " pod="openstack/dnsmasq-dns-67b789f86c-7857q" Nov 24 01:32:42 crc kubenswrapper[4755]: I1124 01:32:42.882925 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/40269c4d-1151-4a12-972b-bbc38662d04e-ovsdbserver-sb\") pod \"dnsmasq-dns-67b789f86c-7857q\" (UID: \"40269c4d-1151-4a12-972b-bbc38662d04e\") " pod="openstack/dnsmasq-dns-67b789f86c-7857q" Nov 24 01:32:42 crc kubenswrapper[4755]: I1124 01:32:42.882944 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bd4cf\" (UniqueName: \"kubernetes.io/projected/40269c4d-1151-4a12-972b-bbc38662d04e-kube-api-access-bd4cf\") pod \"dnsmasq-dns-67b789f86c-7857q\" (UID: \"40269c4d-1151-4a12-972b-bbc38662d04e\") " pod="openstack/dnsmasq-dns-67b789f86c-7857q" Nov 24 01:32:42 crc kubenswrapper[4755]: I1124 01:32:42.882989 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/40269c4d-1151-4a12-972b-bbc38662d04e-ovsdbserver-nb\") pod \"dnsmasq-dns-67b789f86c-7857q\" (UID: \"40269c4d-1151-4a12-972b-bbc38662d04e\") " pod="openstack/dnsmasq-dns-67b789f86c-7857q" Nov 24 01:32:42 crc kubenswrapper[4755]: I1124 01:32:42.883902 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/40269c4d-1151-4a12-972b-bbc38662d04e-ovsdbserver-nb\") pod \"dnsmasq-dns-67b789f86c-7857q\" (UID: \"40269c4d-1151-4a12-972b-bbc38662d04e\") " pod="openstack/dnsmasq-dns-67b789f86c-7857q" Nov 24 01:32:42 crc kubenswrapper[4755]: I1124 01:32:42.884438 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/40269c4d-1151-4a12-972b-bbc38662d04e-dns-swift-storage-0\") pod 
\"dnsmasq-dns-67b789f86c-7857q\" (UID: \"40269c4d-1151-4a12-972b-bbc38662d04e\") " pod="openstack/dnsmasq-dns-67b789f86c-7857q" Nov 24 01:32:42 crc kubenswrapper[4755]: I1124 01:32:42.885024 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40269c4d-1151-4a12-972b-bbc38662d04e-config\") pod \"dnsmasq-dns-67b789f86c-7857q\" (UID: \"40269c4d-1151-4a12-972b-bbc38662d04e\") " pod="openstack/dnsmasq-dns-67b789f86c-7857q" Nov 24 01:32:42 crc kubenswrapper[4755]: I1124 01:32:42.885520 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/40269c4d-1151-4a12-972b-bbc38662d04e-openstack-edpm-ipam\") pod \"dnsmasq-dns-67b789f86c-7857q\" (UID: \"40269c4d-1151-4a12-972b-bbc38662d04e\") " pod="openstack/dnsmasq-dns-67b789f86c-7857q" Nov 24 01:32:42 crc kubenswrapper[4755]: I1124 01:32:42.885700 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/40269c4d-1151-4a12-972b-bbc38662d04e-ovsdbserver-sb\") pod \"dnsmasq-dns-67b789f86c-7857q\" (UID: \"40269c4d-1151-4a12-972b-bbc38662d04e\") " pod="openstack/dnsmasq-dns-67b789f86c-7857q" Nov 24 01:32:42 crc kubenswrapper[4755]: I1124 01:32:42.886202 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/40269c4d-1151-4a12-972b-bbc38662d04e-dns-svc\") pod \"dnsmasq-dns-67b789f86c-7857q\" (UID: \"40269c4d-1151-4a12-972b-bbc38662d04e\") " pod="openstack/dnsmasq-dns-67b789f86c-7857q" Nov 24 01:32:42 crc kubenswrapper[4755]: I1124 01:32:42.908650 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bd4cf\" (UniqueName: \"kubernetes.io/projected/40269c4d-1151-4a12-972b-bbc38662d04e-kube-api-access-bd4cf\") pod \"dnsmasq-dns-67b789f86c-7857q\" (UID: \"40269c4d-1151-4a12-972b-bbc38662d04e\") " pod="openstack/dnsmasq-dns-67b789f86c-7857q" Nov 24 01:32:42 crc kubenswrapper[4755]: I1124 01:32:42.953795 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-7857q" Nov 24 01:32:43 crc kubenswrapper[4755]: I1124 01:32:43.545793 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-7857q"] Nov 24 01:32:43 crc kubenswrapper[4755]: I1124 01:32:43.823646 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"47ce72c9-6a27-44ff-80ed-d844fa0f1d2e","Type":"ContainerStarted","Data":"0535896372aabf6a1d0fe3e116c11970c7923c3760afb0cc9788eafd6c0fdfcd"} Nov 24 01:32:43 crc kubenswrapper[4755]: I1124 01:32:43.825200 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"ba8cbfc3-74a5-4ea6-bd18-8fcab5462623","Type":"ContainerStarted","Data":"cb67e0b31205352c65de84aabecd564ecf4afe9734d738684b9c368f7188abf8"} Nov 24 01:32:43 crc kubenswrapper[4755]: I1124 01:32:43.827052 4755 generic.go:334] "Generic (PLEG): container finished" podID="40269c4d-1151-4a12-972b-bbc38662d04e" containerID="2e32b437a14ce3216d17cf1623a3a94175e633dc2760e42eb5347b6ae5e35ae7" exitCode=0 Nov 24 01:32:43 crc kubenswrapper[4755]: I1124 01:32:43.827093 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-7857q" event={"ID":"40269c4d-1151-4a12-972b-bbc38662d04e","Type":"ContainerDied","Data":"2e32b437a14ce3216d17cf1623a3a94175e633dc2760e42eb5347b6ae5e35ae7"} Nov 24 01:32:43 crc kubenswrapper[4755]: I1124 01:32:43.827137 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-7857q" event={"ID":"40269c4d-1151-4a12-972b-bbc38662d04e","Type":"ContainerStarted","Data":"23cbdeed2ab4cdc898462290d1122ddeef26db2bf685722380998c586f05c036"} Nov 24 01:32:44 crc kubenswrapper[4755]: I1124 01:32:44.838138 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-7857q" event={"ID":"40269c4d-1151-4a12-972b-bbc38662d04e","Type":"ContainerStarted","Data":"f4e9d94245f0b6bfce6f0573d63c4c2158e63641879326bbec6c270585755e62"} Nov 24 01:32:44 crc kubenswrapper[4755]: I1124 01:32:44.866215 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-67b789f86c-7857q" podStartSLOduration=2.866191602 podStartE2EDuration="2.866191602s" podCreationTimestamp="2025-11-24 01:32:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:32:44.860393219 +0000 UTC m=+1189.546458730" watchObservedRunningTime="2025-11-24 01:32:44.866191602 +0000 UTC m=+1189.552257113" Nov 24 01:32:45 crc kubenswrapper[4755]: I1124 01:32:45.845297 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-67b789f86c-7857q" Nov 24 01:32:52 crc kubenswrapper[4755]: I1124 01:32:52.954827 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-67b789f86c-7857q" Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.048943 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-hgwb2"] Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.049749 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-59cf4bdb65-hgwb2" podUID="c80b623c-c6a3-403e-a79f-7e540ac99f4e" containerName="dnsmasq-dns" containerID="cri-o://855fdf3c7b782ce740e3f48c88ff781d164fe0c532f1d2da2ad54f625bd84fbe" gracePeriod=10 Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 
01:32:53.158709 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-cb6ffcf87-6vs5l"] Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.175683 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cb6ffcf87-6vs5l" Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.198027 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cb6ffcf87-6vs5l"] Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.294332 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ae6a5980-1b82-42c7-b86c-109e43e389cd-ovsdbserver-nb\") pod \"dnsmasq-dns-cb6ffcf87-6vs5l\" (UID: \"ae6a5980-1b82-42c7-b86c-109e43e389cd\") " pod="openstack/dnsmasq-dns-cb6ffcf87-6vs5l" Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.294428 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ae6a5980-1b82-42c7-b86c-109e43e389cd-config\") pod \"dnsmasq-dns-cb6ffcf87-6vs5l\" (UID: \"ae6a5980-1b82-42c7-b86c-109e43e389cd\") " pod="openstack/dnsmasq-dns-cb6ffcf87-6vs5l" Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.294492 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/ae6a5980-1b82-42c7-b86c-109e43e389cd-openstack-edpm-ipam\") pod \"dnsmasq-dns-cb6ffcf87-6vs5l\" (UID: \"ae6a5980-1b82-42c7-b86c-109e43e389cd\") " pod="openstack/dnsmasq-dns-cb6ffcf87-6vs5l" Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.294512 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-std86\" (UniqueName: \"kubernetes.io/projected/ae6a5980-1b82-42c7-b86c-109e43e389cd-kube-api-access-std86\") pod \"dnsmasq-dns-cb6ffcf87-6vs5l\" (UID: \"ae6a5980-1b82-42c7-b86c-109e43e389cd\") " pod="openstack/dnsmasq-dns-cb6ffcf87-6vs5l" Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.294537 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ae6a5980-1b82-42c7-b86c-109e43e389cd-dns-swift-storage-0\") pod \"dnsmasq-dns-cb6ffcf87-6vs5l\" (UID: \"ae6a5980-1b82-42c7-b86c-109e43e389cd\") " pod="openstack/dnsmasq-dns-cb6ffcf87-6vs5l" Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.294575 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ae6a5980-1b82-42c7-b86c-109e43e389cd-dns-svc\") pod \"dnsmasq-dns-cb6ffcf87-6vs5l\" (UID: \"ae6a5980-1b82-42c7-b86c-109e43e389cd\") " pod="openstack/dnsmasq-dns-cb6ffcf87-6vs5l" Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.294652 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ae6a5980-1b82-42c7-b86c-109e43e389cd-ovsdbserver-sb\") pod \"dnsmasq-dns-cb6ffcf87-6vs5l\" (UID: \"ae6a5980-1b82-42c7-b86c-109e43e389cd\") " pod="openstack/dnsmasq-dns-cb6ffcf87-6vs5l" Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.396952 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: 
\"kubernetes.io/configmap/ae6a5980-1b82-42c7-b86c-109e43e389cd-openstack-edpm-ipam\") pod \"dnsmasq-dns-cb6ffcf87-6vs5l\" (UID: \"ae6a5980-1b82-42c7-b86c-109e43e389cd\") " pod="openstack/dnsmasq-dns-cb6ffcf87-6vs5l" Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.397315 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-std86\" (UniqueName: \"kubernetes.io/projected/ae6a5980-1b82-42c7-b86c-109e43e389cd-kube-api-access-std86\") pod \"dnsmasq-dns-cb6ffcf87-6vs5l\" (UID: \"ae6a5980-1b82-42c7-b86c-109e43e389cd\") " pod="openstack/dnsmasq-dns-cb6ffcf87-6vs5l" Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.397346 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ae6a5980-1b82-42c7-b86c-109e43e389cd-dns-swift-storage-0\") pod \"dnsmasq-dns-cb6ffcf87-6vs5l\" (UID: \"ae6a5980-1b82-42c7-b86c-109e43e389cd\") " pod="openstack/dnsmasq-dns-cb6ffcf87-6vs5l" Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.397399 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ae6a5980-1b82-42c7-b86c-109e43e389cd-dns-svc\") pod \"dnsmasq-dns-cb6ffcf87-6vs5l\" (UID: \"ae6a5980-1b82-42c7-b86c-109e43e389cd\") " pod="openstack/dnsmasq-dns-cb6ffcf87-6vs5l" Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.397477 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ae6a5980-1b82-42c7-b86c-109e43e389cd-ovsdbserver-sb\") pod \"dnsmasq-dns-cb6ffcf87-6vs5l\" (UID: \"ae6a5980-1b82-42c7-b86c-109e43e389cd\") " pod="openstack/dnsmasq-dns-cb6ffcf87-6vs5l" Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.397540 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ae6a5980-1b82-42c7-b86c-109e43e389cd-ovsdbserver-nb\") pod \"dnsmasq-dns-cb6ffcf87-6vs5l\" (UID: \"ae6a5980-1b82-42c7-b86c-109e43e389cd\") " pod="openstack/dnsmasq-dns-cb6ffcf87-6vs5l" Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.397634 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ae6a5980-1b82-42c7-b86c-109e43e389cd-config\") pod \"dnsmasq-dns-cb6ffcf87-6vs5l\" (UID: \"ae6a5980-1b82-42c7-b86c-109e43e389cd\") " pod="openstack/dnsmasq-dns-cb6ffcf87-6vs5l" Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.398553 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ae6a5980-1b82-42c7-b86c-109e43e389cd-dns-svc\") pod \"dnsmasq-dns-cb6ffcf87-6vs5l\" (UID: \"ae6a5980-1b82-42c7-b86c-109e43e389cd\") " pod="openstack/dnsmasq-dns-cb6ffcf87-6vs5l" Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.398808 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ae6a5980-1b82-42c7-b86c-109e43e389cd-ovsdbserver-nb\") pod \"dnsmasq-dns-cb6ffcf87-6vs5l\" (UID: \"ae6a5980-1b82-42c7-b86c-109e43e389cd\") " pod="openstack/dnsmasq-dns-cb6ffcf87-6vs5l" Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.399033 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ae6a5980-1b82-42c7-b86c-109e43e389cd-dns-swift-storage-0\") pod 
\"dnsmasq-dns-cb6ffcf87-6vs5l\" (UID: \"ae6a5980-1b82-42c7-b86c-109e43e389cd\") " pod="openstack/dnsmasq-dns-cb6ffcf87-6vs5l" Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.399361 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ae6a5980-1b82-42c7-b86c-109e43e389cd-ovsdbserver-sb\") pod \"dnsmasq-dns-cb6ffcf87-6vs5l\" (UID: \"ae6a5980-1b82-42c7-b86c-109e43e389cd\") " pod="openstack/dnsmasq-dns-cb6ffcf87-6vs5l" Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.399483 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/ae6a5980-1b82-42c7-b86c-109e43e389cd-openstack-edpm-ipam\") pod \"dnsmasq-dns-cb6ffcf87-6vs5l\" (UID: \"ae6a5980-1b82-42c7-b86c-109e43e389cd\") " pod="openstack/dnsmasq-dns-cb6ffcf87-6vs5l" Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.399686 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ae6a5980-1b82-42c7-b86c-109e43e389cd-config\") pod \"dnsmasq-dns-cb6ffcf87-6vs5l\" (UID: \"ae6a5980-1b82-42c7-b86c-109e43e389cd\") " pod="openstack/dnsmasq-dns-cb6ffcf87-6vs5l" Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.429565 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-std86\" (UniqueName: \"kubernetes.io/projected/ae6a5980-1b82-42c7-b86c-109e43e389cd-kube-api-access-std86\") pod \"dnsmasq-dns-cb6ffcf87-6vs5l\" (UID: \"ae6a5980-1b82-42c7-b86c-109e43e389cd\") " pod="openstack/dnsmasq-dns-cb6ffcf87-6vs5l" Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.541327 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cb6ffcf87-6vs5l" Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.545255 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-hgwb2" Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.602740 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c80b623c-c6a3-403e-a79f-7e540ac99f4e-dns-svc\") pod \"c80b623c-c6a3-403e-a79f-7e540ac99f4e\" (UID: \"c80b623c-c6a3-403e-a79f-7e540ac99f4e\") " Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.602880 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c80b623c-c6a3-403e-a79f-7e540ac99f4e-ovsdbserver-sb\") pod \"c80b623c-c6a3-403e-a79f-7e540ac99f4e\" (UID: \"c80b623c-c6a3-403e-a79f-7e540ac99f4e\") " Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.603004 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c80b623c-c6a3-403e-a79f-7e540ac99f4e-dns-swift-storage-0\") pod \"c80b623c-c6a3-403e-a79f-7e540ac99f4e\" (UID: \"c80b623c-c6a3-403e-a79f-7e540ac99f4e\") " Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.603075 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c80b623c-c6a3-403e-a79f-7e540ac99f4e-ovsdbserver-nb\") pod \"c80b623c-c6a3-403e-a79f-7e540ac99f4e\" (UID: \"c80b623c-c6a3-403e-a79f-7e540ac99f4e\") " Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.603153 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c80b623c-c6a3-403e-a79f-7e540ac99f4e-config\") pod \"c80b623c-c6a3-403e-a79f-7e540ac99f4e\" (UID: \"c80b623c-c6a3-403e-a79f-7e540ac99f4e\") " Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.603182 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s9k4g\" (UniqueName: \"kubernetes.io/projected/c80b623c-c6a3-403e-a79f-7e540ac99f4e-kube-api-access-s9k4g\") pod \"c80b623c-c6a3-403e-a79f-7e540ac99f4e\" (UID: \"c80b623c-c6a3-403e-a79f-7e540ac99f4e\") " Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.615219 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c80b623c-c6a3-403e-a79f-7e540ac99f4e-kube-api-access-s9k4g" (OuterVolumeSpecName: "kube-api-access-s9k4g") pod "c80b623c-c6a3-403e-a79f-7e540ac99f4e" (UID: "c80b623c-c6a3-403e-a79f-7e540ac99f4e"). InnerVolumeSpecName "kube-api-access-s9k4g". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.666482 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c80b623c-c6a3-403e-a79f-7e540ac99f4e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c80b623c-c6a3-403e-a79f-7e540ac99f4e" (UID: "c80b623c-c6a3-403e-a79f-7e540ac99f4e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.671090 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c80b623c-c6a3-403e-a79f-7e540ac99f4e-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "c80b623c-c6a3-403e-a79f-7e540ac99f4e" (UID: "c80b623c-c6a3-403e-a79f-7e540ac99f4e"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.706077 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s9k4g\" (UniqueName: \"kubernetes.io/projected/c80b623c-c6a3-403e-a79f-7e540ac99f4e-kube-api-access-s9k4g\") on node \"crc\" DevicePath \"\"" Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.706288 4755 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c80b623c-c6a3-403e-a79f-7e540ac99f4e-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.706302 4755 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c80b623c-c6a3-403e-a79f-7e540ac99f4e-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.708501 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c80b623c-c6a3-403e-a79f-7e540ac99f4e-config" (OuterVolumeSpecName: "config") pod "c80b623c-c6a3-403e-a79f-7e540ac99f4e" (UID: "c80b623c-c6a3-403e-a79f-7e540ac99f4e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.717196 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c80b623c-c6a3-403e-a79f-7e540ac99f4e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c80b623c-c6a3-403e-a79f-7e540ac99f4e" (UID: "c80b623c-c6a3-403e-a79f-7e540ac99f4e"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.741662 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c80b623c-c6a3-403e-a79f-7e540ac99f4e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c80b623c-c6a3-403e-a79f-7e540ac99f4e" (UID: "c80b623c-c6a3-403e-a79f-7e540ac99f4e"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.807868 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c80b623c-c6a3-403e-a79f-7e540ac99f4e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.807902 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c80b623c-c6a3-403e-a79f-7e540ac99f4e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.807914 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c80b623c-c6a3-403e-a79f-7e540ac99f4e-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.928100 4755 generic.go:334] "Generic (PLEG): container finished" podID="c80b623c-c6a3-403e-a79f-7e540ac99f4e" containerID="855fdf3c7b782ce740e3f48c88ff781d164fe0c532f1d2da2ad54f625bd84fbe" exitCode=0 Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.928145 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-hgwb2" event={"ID":"c80b623c-c6a3-403e-a79f-7e540ac99f4e","Type":"ContainerDied","Data":"855fdf3c7b782ce740e3f48c88ff781d164fe0c532f1d2da2ad54f625bd84fbe"} Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.928154 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-hgwb2" Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.928178 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-hgwb2" event={"ID":"c80b623c-c6a3-403e-a79f-7e540ac99f4e","Type":"ContainerDied","Data":"1cc89a5387b20e3d1b2eda6d2582c1b0e32a3fd869542a9a4219a31628d6744a"} Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.928197 4755 scope.go:117] "RemoveContainer" containerID="855fdf3c7b782ce740e3f48c88ff781d164fe0c532f1d2da2ad54f625bd84fbe" Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.962852 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-hgwb2"] Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.964146 4755 scope.go:117] "RemoveContainer" containerID="e209f54246103e16a1afde95ab3a456634b4c3f5898a70d1384708b818400c7e" Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.971778 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-hgwb2"] Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.988926 4755 scope.go:117] "RemoveContainer" containerID="855fdf3c7b782ce740e3f48c88ff781d164fe0c532f1d2da2ad54f625bd84fbe" Nov 24 01:32:53 crc kubenswrapper[4755]: E1124 01:32:53.990672 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"855fdf3c7b782ce740e3f48c88ff781d164fe0c532f1d2da2ad54f625bd84fbe\": container with ID starting with 855fdf3c7b782ce740e3f48c88ff781d164fe0c532f1d2da2ad54f625bd84fbe not found: ID does not exist" containerID="855fdf3c7b782ce740e3f48c88ff781d164fe0c532f1d2da2ad54f625bd84fbe" Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.990728 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"855fdf3c7b782ce740e3f48c88ff781d164fe0c532f1d2da2ad54f625bd84fbe"} err="failed to get container status 
\"855fdf3c7b782ce740e3f48c88ff781d164fe0c532f1d2da2ad54f625bd84fbe\": rpc error: code = NotFound desc = could not find container \"855fdf3c7b782ce740e3f48c88ff781d164fe0c532f1d2da2ad54f625bd84fbe\": container with ID starting with 855fdf3c7b782ce740e3f48c88ff781d164fe0c532f1d2da2ad54f625bd84fbe not found: ID does not exist" Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.990761 4755 scope.go:117] "RemoveContainer" containerID="e209f54246103e16a1afde95ab3a456634b4c3f5898a70d1384708b818400c7e" Nov 24 01:32:53 crc kubenswrapper[4755]: E1124 01:32:53.991154 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e209f54246103e16a1afde95ab3a456634b4c3f5898a70d1384708b818400c7e\": container with ID starting with e209f54246103e16a1afde95ab3a456634b4c3f5898a70d1384708b818400c7e not found: ID does not exist" containerID="e209f54246103e16a1afde95ab3a456634b4c3f5898a70d1384708b818400c7e" Nov 24 01:32:53 crc kubenswrapper[4755]: I1124 01:32:53.991191 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e209f54246103e16a1afde95ab3a456634b4c3f5898a70d1384708b818400c7e"} err="failed to get container status \"e209f54246103e16a1afde95ab3a456634b4c3f5898a70d1384708b818400c7e\": rpc error: code = NotFound desc = could not find container \"e209f54246103e16a1afde95ab3a456634b4c3f5898a70d1384708b818400c7e\": container with ID starting with e209f54246103e16a1afde95ab3a456634b4c3f5898a70d1384708b818400c7e not found: ID does not exist" Nov 24 01:32:54 crc kubenswrapper[4755]: I1124 01:32:54.063328 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c80b623c-c6a3-403e-a79f-7e540ac99f4e" path="/var/lib/kubelet/pods/c80b623c-c6a3-403e-a79f-7e540ac99f4e/volumes" Nov 24 01:32:54 crc kubenswrapper[4755]: I1124 01:32:54.098437 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cb6ffcf87-6vs5l"] Nov 24 01:32:54 crc kubenswrapper[4755]: W1124 01:32:54.100442 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podae6a5980_1b82_42c7_b86c_109e43e389cd.slice/crio-34f20ac26650df8a42e8ae873518e8561e6298c9e26c82d0457377f0153d6c4b WatchSource:0}: Error finding container 34f20ac26650df8a42e8ae873518e8561e6298c9e26c82d0457377f0153d6c4b: Status 404 returned error can't find the container with id 34f20ac26650df8a42e8ae873518e8561e6298c9e26c82d0457377f0153d6c4b Nov 24 01:32:54 crc kubenswrapper[4755]: I1124 01:32:54.938992 4755 generic.go:334] "Generic (PLEG): container finished" podID="ae6a5980-1b82-42c7-b86c-109e43e389cd" containerID="1353de26e626ac6399efdfb52b6cd2e6ed19f26671bdc762a8e7255723cc58eb" exitCode=0 Nov 24 01:32:54 crc kubenswrapper[4755]: I1124 01:32:54.939099 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cb6ffcf87-6vs5l" event={"ID":"ae6a5980-1b82-42c7-b86c-109e43e389cd","Type":"ContainerDied","Data":"1353de26e626ac6399efdfb52b6cd2e6ed19f26671bdc762a8e7255723cc58eb"} Nov 24 01:32:54 crc kubenswrapper[4755]: I1124 01:32:54.939272 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cb6ffcf87-6vs5l" event={"ID":"ae6a5980-1b82-42c7-b86c-109e43e389cd","Type":"ContainerStarted","Data":"34f20ac26650df8a42e8ae873518e8561e6298c9e26c82d0457377f0153d6c4b"} Nov 24 01:32:55 crc kubenswrapper[4755]: I1124 01:32:55.948862 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-cb6ffcf87-6vs5l" event={"ID":"ae6a5980-1b82-42c7-b86c-109e43e389cd","Type":"ContainerStarted","Data":"9deec465ce6db2e4f674a29b91306223c465466f77b5c38f053cb725e9ae7637"} Nov 24 01:32:55 crc kubenswrapper[4755]: I1124 01:32:55.949215 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-cb6ffcf87-6vs5l" Nov 24 01:32:55 crc kubenswrapper[4755]: I1124 01:32:55.984331 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-cb6ffcf87-6vs5l" podStartSLOduration=2.984309562 podStartE2EDuration="2.984309562s" podCreationTimestamp="2025-11-24 01:32:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:32:55.976496762 +0000 UTC m=+1200.662562303" watchObservedRunningTime="2025-11-24 01:32:55.984309562 +0000 UTC m=+1200.670375063" Nov 24 01:32:58 crc kubenswrapper[4755]: I1124 01:32:58.322327 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-59cf4bdb65-hgwb2" podUID="c80b623c-c6a3-403e-a79f-7e540ac99f4e" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.199:5353: i/o timeout" Nov 24 01:33:03 crc kubenswrapper[4755]: I1124 01:33:03.543797 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-cb6ffcf87-6vs5l" Nov 24 01:33:03 crc kubenswrapper[4755]: I1124 01:33:03.645450 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-7857q"] Nov 24 01:33:03 crc kubenswrapper[4755]: I1124 01:33:03.645892 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-67b789f86c-7857q" podUID="40269c4d-1151-4a12-972b-bbc38662d04e" containerName="dnsmasq-dns" containerID="cri-o://f4e9d94245f0b6bfce6f0573d63c4c2158e63641879326bbec6c270585755e62" gracePeriod=10 Nov 24 01:33:04 crc kubenswrapper[4755]: I1124 01:33:04.043729 4755 generic.go:334] "Generic (PLEG): container finished" podID="40269c4d-1151-4a12-972b-bbc38662d04e" containerID="f4e9d94245f0b6bfce6f0573d63c4c2158e63641879326bbec6c270585755e62" exitCode=0 Nov 24 01:33:04 crc kubenswrapper[4755]: I1124 01:33:04.043975 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-7857q" event={"ID":"40269c4d-1151-4a12-972b-bbc38662d04e","Type":"ContainerDied","Data":"f4e9d94245f0b6bfce6f0573d63c4c2158e63641879326bbec6c270585755e62"} Nov 24 01:33:04 crc kubenswrapper[4755]: I1124 01:33:04.044001 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-7857q" event={"ID":"40269c4d-1151-4a12-972b-bbc38662d04e","Type":"ContainerDied","Data":"23cbdeed2ab4cdc898462290d1122ddeef26db2bf685722380998c586f05c036"} Nov 24 01:33:04 crc kubenswrapper[4755]: I1124 01:33:04.044013 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="23cbdeed2ab4cdc898462290d1122ddeef26db2bf685722380998c586f05c036" Nov 24 01:33:04 crc kubenswrapper[4755]: I1124 01:33:04.121397 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-7857q" Nov 24 01:33:04 crc kubenswrapper[4755]: I1124 01:33:04.157475 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/40269c4d-1151-4a12-972b-bbc38662d04e-ovsdbserver-nb\") pod \"40269c4d-1151-4a12-972b-bbc38662d04e\" (UID: \"40269c4d-1151-4a12-972b-bbc38662d04e\") " Nov 24 01:33:04 crc kubenswrapper[4755]: I1124 01:33:04.157589 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/40269c4d-1151-4a12-972b-bbc38662d04e-openstack-edpm-ipam\") pod \"40269c4d-1151-4a12-972b-bbc38662d04e\" (UID: \"40269c4d-1151-4a12-972b-bbc38662d04e\") " Nov 24 01:33:04 crc kubenswrapper[4755]: I1124 01:33:04.157676 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/40269c4d-1151-4a12-972b-bbc38662d04e-ovsdbserver-sb\") pod \"40269c4d-1151-4a12-972b-bbc38662d04e\" (UID: \"40269c4d-1151-4a12-972b-bbc38662d04e\") " Nov 24 01:33:04 crc kubenswrapper[4755]: I1124 01:33:04.157756 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/40269c4d-1151-4a12-972b-bbc38662d04e-dns-swift-storage-0\") pod \"40269c4d-1151-4a12-972b-bbc38662d04e\" (UID: \"40269c4d-1151-4a12-972b-bbc38662d04e\") " Nov 24 01:33:04 crc kubenswrapper[4755]: I1124 01:33:04.157786 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bd4cf\" (UniqueName: \"kubernetes.io/projected/40269c4d-1151-4a12-972b-bbc38662d04e-kube-api-access-bd4cf\") pod \"40269c4d-1151-4a12-972b-bbc38662d04e\" (UID: \"40269c4d-1151-4a12-972b-bbc38662d04e\") " Nov 24 01:33:04 crc kubenswrapper[4755]: I1124 01:33:04.157840 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/40269c4d-1151-4a12-972b-bbc38662d04e-dns-svc\") pod \"40269c4d-1151-4a12-972b-bbc38662d04e\" (UID: \"40269c4d-1151-4a12-972b-bbc38662d04e\") " Nov 24 01:33:04 crc kubenswrapper[4755]: I1124 01:33:04.157941 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40269c4d-1151-4a12-972b-bbc38662d04e-config\") pod \"40269c4d-1151-4a12-972b-bbc38662d04e\" (UID: \"40269c4d-1151-4a12-972b-bbc38662d04e\") " Nov 24 01:33:04 crc kubenswrapper[4755]: I1124 01:33:04.174423 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/40269c4d-1151-4a12-972b-bbc38662d04e-kube-api-access-bd4cf" (OuterVolumeSpecName: "kube-api-access-bd4cf") pod "40269c4d-1151-4a12-972b-bbc38662d04e" (UID: "40269c4d-1151-4a12-972b-bbc38662d04e"). InnerVolumeSpecName "kube-api-access-bd4cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:33:04 crc kubenswrapper[4755]: I1124 01:33:04.216879 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/40269c4d-1151-4a12-972b-bbc38662d04e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "40269c4d-1151-4a12-972b-bbc38662d04e" (UID: "40269c4d-1151-4a12-972b-bbc38662d04e"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:33:04 crc kubenswrapper[4755]: I1124 01:33:04.224198 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/40269c4d-1151-4a12-972b-bbc38662d04e-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "40269c4d-1151-4a12-972b-bbc38662d04e" (UID: "40269c4d-1151-4a12-972b-bbc38662d04e"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:33:04 crc kubenswrapper[4755]: I1124 01:33:04.225321 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/40269c4d-1151-4a12-972b-bbc38662d04e-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "40269c4d-1151-4a12-972b-bbc38662d04e" (UID: "40269c4d-1151-4a12-972b-bbc38662d04e"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:33:04 crc kubenswrapper[4755]: I1124 01:33:04.236869 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/40269c4d-1151-4a12-972b-bbc38662d04e-config" (OuterVolumeSpecName: "config") pod "40269c4d-1151-4a12-972b-bbc38662d04e" (UID: "40269c4d-1151-4a12-972b-bbc38662d04e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:33:04 crc kubenswrapper[4755]: I1124 01:33:04.241762 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/40269c4d-1151-4a12-972b-bbc38662d04e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "40269c4d-1151-4a12-972b-bbc38662d04e" (UID: "40269c4d-1151-4a12-972b-bbc38662d04e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:33:04 crc kubenswrapper[4755]: I1124 01:33:04.248493 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/40269c4d-1151-4a12-972b-bbc38662d04e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "40269c4d-1151-4a12-972b-bbc38662d04e" (UID: "40269c4d-1151-4a12-972b-bbc38662d04e"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:33:04 crc kubenswrapper[4755]: I1124 01:33:04.260988 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bd4cf\" (UniqueName: \"kubernetes.io/projected/40269c4d-1151-4a12-972b-bbc38662d04e-kube-api-access-bd4cf\") on node \"crc\" DevicePath \"\"" Nov 24 01:33:04 crc kubenswrapper[4755]: I1124 01:33:04.261035 4755 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/40269c4d-1151-4a12-972b-bbc38662d04e-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 24 01:33:04 crc kubenswrapper[4755]: I1124 01:33:04.261047 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40269c4d-1151-4a12-972b-bbc38662d04e-config\") on node \"crc\" DevicePath \"\"" Nov 24 01:33:04 crc kubenswrapper[4755]: I1124 01:33:04.261063 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/40269c4d-1151-4a12-972b-bbc38662d04e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 24 01:33:04 crc kubenswrapper[4755]: I1124 01:33:04.261074 4755 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/40269c4d-1151-4a12-972b-bbc38662d04e-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Nov 24 01:33:04 crc kubenswrapper[4755]: I1124 01:33:04.261085 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/40269c4d-1151-4a12-972b-bbc38662d04e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 24 01:33:04 crc kubenswrapper[4755]: I1124 01:33:04.261096 4755 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/40269c4d-1151-4a12-972b-bbc38662d04e-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Nov 24 01:33:05 crc kubenswrapper[4755]: I1124 01:33:05.052581 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-7857q" Nov 24 01:33:05 crc kubenswrapper[4755]: I1124 01:33:05.085929 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-7857q"] Nov 24 01:33:05 crc kubenswrapper[4755]: I1124 01:33:05.093511 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-7857q"] Nov 24 01:33:06 crc kubenswrapper[4755]: I1124 01:33:06.017493 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="40269c4d-1151-4a12-972b-bbc38662d04e" path="/var/lib/kubelet/pods/40269c4d-1151-4a12-972b-bbc38662d04e/volumes" Nov 24 01:33:16 crc kubenswrapper[4755]: I1124 01:33:16.168650 4755 generic.go:334] "Generic (PLEG): container finished" podID="47ce72c9-6a27-44ff-80ed-d844fa0f1d2e" containerID="0535896372aabf6a1d0fe3e116c11970c7923c3760afb0cc9788eafd6c0fdfcd" exitCode=0 Nov 24 01:33:16 crc kubenswrapper[4755]: I1124 01:33:16.168733 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"47ce72c9-6a27-44ff-80ed-d844fa0f1d2e","Type":"ContainerDied","Data":"0535896372aabf6a1d0fe3e116c11970c7923c3760afb0cc9788eafd6c0fdfcd"} Nov 24 01:33:16 crc kubenswrapper[4755]: I1124 01:33:16.171668 4755 generic.go:334] "Generic (PLEG): container finished" podID="ba8cbfc3-74a5-4ea6-bd18-8fcab5462623" containerID="cb67e0b31205352c65de84aabecd564ecf4afe9734d738684b9c368f7188abf8" exitCode=0 Nov 24 01:33:16 crc kubenswrapper[4755]: I1124 01:33:16.171713 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"ba8cbfc3-74a5-4ea6-bd18-8fcab5462623","Type":"ContainerDied","Data":"cb67e0b31205352c65de84aabecd564ecf4afe9734d738684b9c368f7188abf8"} Nov 24 01:33:16 crc kubenswrapper[4755]: I1124 01:33:16.954249 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5hnsd"] Nov 24 01:33:16 crc kubenswrapper[4755]: E1124 01:33:16.955843 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40269c4d-1151-4a12-972b-bbc38662d04e" containerName="init" Nov 24 01:33:16 crc kubenswrapper[4755]: I1124 01:33:16.955867 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="40269c4d-1151-4a12-972b-bbc38662d04e" containerName="init" Nov 24 01:33:16 crc kubenswrapper[4755]: E1124 01:33:16.956715 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40269c4d-1151-4a12-972b-bbc38662d04e" containerName="dnsmasq-dns" Nov 24 01:33:16 crc kubenswrapper[4755]: I1124 01:33:16.956740 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="40269c4d-1151-4a12-972b-bbc38662d04e" containerName="dnsmasq-dns" Nov 24 01:33:16 crc kubenswrapper[4755]: E1124 01:33:16.956764 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c80b623c-c6a3-403e-a79f-7e540ac99f4e" containerName="init" Nov 24 01:33:16 crc kubenswrapper[4755]: I1124 01:33:16.956773 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="c80b623c-c6a3-403e-a79f-7e540ac99f4e" containerName="init" Nov 24 01:33:16 crc kubenswrapper[4755]: E1124 01:33:16.956783 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c80b623c-c6a3-403e-a79f-7e540ac99f4e" containerName="dnsmasq-dns" Nov 24 01:33:16 crc kubenswrapper[4755]: I1124 01:33:16.956790 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="c80b623c-c6a3-403e-a79f-7e540ac99f4e" containerName="dnsmasq-dns" Nov 24 01:33:16 crc kubenswrapper[4755]: I1124 01:33:16.957039 4755 
memory_manager.go:354] "RemoveStaleState removing state" podUID="40269c4d-1151-4a12-972b-bbc38662d04e" containerName="dnsmasq-dns" Nov 24 01:33:16 crc kubenswrapper[4755]: I1124 01:33:16.957064 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="c80b623c-c6a3-403e-a79f-7e540ac99f4e" containerName="dnsmasq-dns" Nov 24 01:33:16 crc kubenswrapper[4755]: I1124 01:33:16.957850 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5hnsd" Nov 24 01:33:16 crc kubenswrapper[4755]: I1124 01:33:16.960670 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Nov 24 01:33:16 crc kubenswrapper[4755]: I1124 01:33:16.960906 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Nov 24 01:33:16 crc kubenswrapper[4755]: I1124 01:33:16.964227 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 24 01:33:16 crc kubenswrapper[4755]: I1124 01:33:16.964416 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-4dz5v" Nov 24 01:33:16 crc kubenswrapper[4755]: I1124 01:33:16.982251 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5hnsd"] Nov 24 01:33:16 crc kubenswrapper[4755]: I1124 01:33:16.993722 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/96e0eeaf-102b-47ad-8f60-02115894de6e-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-5hnsd\" (UID: \"96e0eeaf-102b-47ad-8f60-02115894de6e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5hnsd" Nov 24 01:33:16 crc kubenswrapper[4755]: I1124 01:33:16.993806 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/96e0eeaf-102b-47ad-8f60-02115894de6e-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-5hnsd\" (UID: \"96e0eeaf-102b-47ad-8f60-02115894de6e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5hnsd" Nov 24 01:33:16 crc kubenswrapper[4755]: I1124 01:33:16.993841 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5r5bq\" (UniqueName: \"kubernetes.io/projected/96e0eeaf-102b-47ad-8f60-02115894de6e-kube-api-access-5r5bq\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-5hnsd\" (UID: \"96e0eeaf-102b-47ad-8f60-02115894de6e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5hnsd" Nov 24 01:33:16 crc kubenswrapper[4755]: I1124 01:33:16.993923 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96e0eeaf-102b-47ad-8f60-02115894de6e-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-5hnsd\" (UID: \"96e0eeaf-102b-47ad-8f60-02115894de6e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5hnsd" Nov 24 01:33:17 crc kubenswrapper[4755]: I1124 01:33:17.095552 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/96e0eeaf-102b-47ad-8f60-02115894de6e-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-5hnsd\" (UID: \"96e0eeaf-102b-47ad-8f60-02115894de6e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5hnsd" Nov 24 01:33:17 crc kubenswrapper[4755]: I1124 01:33:17.096571 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/96e0eeaf-102b-47ad-8f60-02115894de6e-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-5hnsd\" (UID: \"96e0eeaf-102b-47ad-8f60-02115894de6e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5hnsd" Nov 24 01:33:17 crc kubenswrapper[4755]: I1124 01:33:17.096743 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/96e0eeaf-102b-47ad-8f60-02115894de6e-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-5hnsd\" (UID: \"96e0eeaf-102b-47ad-8f60-02115894de6e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5hnsd" Nov 24 01:33:17 crc kubenswrapper[4755]: I1124 01:33:17.096841 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5r5bq\" (UniqueName: \"kubernetes.io/projected/96e0eeaf-102b-47ad-8f60-02115894de6e-kube-api-access-5r5bq\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-5hnsd\" (UID: \"96e0eeaf-102b-47ad-8f60-02115894de6e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5hnsd" Nov 24 01:33:17 crc kubenswrapper[4755]: I1124 01:33:17.100553 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/96e0eeaf-102b-47ad-8f60-02115894de6e-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-5hnsd\" (UID: \"96e0eeaf-102b-47ad-8f60-02115894de6e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5hnsd" Nov 24 01:33:17 crc kubenswrapper[4755]: I1124 01:33:17.101438 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96e0eeaf-102b-47ad-8f60-02115894de6e-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-5hnsd\" (UID: \"96e0eeaf-102b-47ad-8f60-02115894de6e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5hnsd" Nov 24 01:33:17 crc kubenswrapper[4755]: I1124 01:33:17.102801 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/96e0eeaf-102b-47ad-8f60-02115894de6e-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-5hnsd\" (UID: \"96e0eeaf-102b-47ad-8f60-02115894de6e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5hnsd" Nov 24 01:33:17 crc kubenswrapper[4755]: I1124 01:33:17.121497 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5r5bq\" (UniqueName: \"kubernetes.io/projected/96e0eeaf-102b-47ad-8f60-02115894de6e-kube-api-access-5r5bq\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-5hnsd\" (UID: \"96e0eeaf-102b-47ad-8f60-02115894de6e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5hnsd" Nov 24 01:33:17 crc kubenswrapper[4755]: I1124 01:33:17.184082 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" 
event={"ID":"ba8cbfc3-74a5-4ea6-bd18-8fcab5462623","Type":"ContainerStarted","Data":"b5620bc8575d29c5e5c631dddd6c3a8589e5b6fff264bcd429e07fad0a9eae00"} Nov 24 01:33:17 crc kubenswrapper[4755]: I1124 01:33:17.184334 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:33:17 crc kubenswrapper[4755]: I1124 01:33:17.186725 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"47ce72c9-6a27-44ff-80ed-d844fa0f1d2e","Type":"ContainerStarted","Data":"d06b60ea1846a7b4f32430cac6978b7dad59940a5c352a8a49a7324691b26f07"} Nov 24 01:33:17 crc kubenswrapper[4755]: I1124 01:33:17.186928 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Nov 24 01:33:17 crc kubenswrapper[4755]: I1124 01:33:17.214245 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=36.214221989 podStartE2EDuration="36.214221989s" podCreationTimestamp="2025-11-24 01:32:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:33:17.212091509 +0000 UTC m=+1221.898157050" watchObservedRunningTime="2025-11-24 01:33:17.214221989 +0000 UTC m=+1221.900287530" Nov 24 01:33:17 crc kubenswrapper[4755]: I1124 01:33:17.247196 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.247171707 podStartE2EDuration="37.247171707s" podCreationTimestamp="2025-11-24 01:32:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:33:17.237918116 +0000 UTC m=+1221.923983647" watchObservedRunningTime="2025-11-24 01:33:17.247171707 +0000 UTC m=+1221.933237218" Nov 24 01:33:17 crc kubenswrapper[4755]: I1124 01:33:17.284343 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5hnsd" Nov 24 01:33:17 crc kubenswrapper[4755]: I1124 01:33:17.874919 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5hnsd"] Nov 24 01:33:17 crc kubenswrapper[4755]: W1124 01:33:17.882904 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod96e0eeaf_102b_47ad_8f60_02115894de6e.slice/crio-417452fd8c996d25623305a5571e5ee3f5faf0dae42174894751985d04f9e71c WatchSource:0}: Error finding container 417452fd8c996d25623305a5571e5ee3f5faf0dae42174894751985d04f9e71c: Status 404 returned error can't find the container with id 417452fd8c996d25623305a5571e5ee3f5faf0dae42174894751985d04f9e71c Nov 24 01:33:17 crc kubenswrapper[4755]: I1124 01:33:17.885120 4755 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 24 01:33:18 crc kubenswrapper[4755]: I1124 01:33:18.197799 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5hnsd" event={"ID":"96e0eeaf-102b-47ad-8f60-02115894de6e","Type":"ContainerStarted","Data":"417452fd8c996d25623305a5571e5ee3f5faf0dae42174894751985d04f9e71c"} Nov 24 01:33:27 crc kubenswrapper[4755]: I1124 01:33:27.298089 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5hnsd" event={"ID":"96e0eeaf-102b-47ad-8f60-02115894de6e","Type":"ContainerStarted","Data":"6121b5fd3f48d59fc16de1604449e21a923bb5d826ac30aa6582251b525422e8"} Nov 24 01:33:27 crc kubenswrapper[4755]: I1124 01:33:27.324723 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5hnsd" podStartSLOduration=2.535754919 podStartE2EDuration="11.32470251s" podCreationTimestamp="2025-11-24 01:33:16 +0000 UTC" firstStartedPulling="2025-11-24 01:33:17.884869599 +0000 UTC m=+1222.570935100" lastFinishedPulling="2025-11-24 01:33:26.67381718 +0000 UTC m=+1231.359882691" observedRunningTime="2025-11-24 01:33:27.320767851 +0000 UTC m=+1232.006833362" watchObservedRunningTime="2025-11-24 01:33:27.32470251 +0000 UTC m=+1232.010768011" Nov 24 01:33:31 crc kubenswrapper[4755]: I1124 01:33:31.280829 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Nov 24 01:33:31 crc kubenswrapper[4755]: I1124 01:33:31.563921 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Nov 24 01:33:38 crc kubenswrapper[4755]: I1124 01:33:38.396242 4755 generic.go:334] "Generic (PLEG): container finished" podID="96e0eeaf-102b-47ad-8f60-02115894de6e" containerID="6121b5fd3f48d59fc16de1604449e21a923bb5d826ac30aa6582251b525422e8" exitCode=0 Nov 24 01:33:38 crc kubenswrapper[4755]: I1124 01:33:38.396355 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5hnsd" event={"ID":"96e0eeaf-102b-47ad-8f60-02115894de6e","Type":"ContainerDied","Data":"6121b5fd3f48d59fc16de1604449e21a923bb5d826ac30aa6582251b525422e8"} Nov 24 01:33:39 crc kubenswrapper[4755]: I1124 01:33:39.839969 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5hnsd" Nov 24 01:33:39 crc kubenswrapper[4755]: I1124 01:33:39.905635 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96e0eeaf-102b-47ad-8f60-02115894de6e-repo-setup-combined-ca-bundle\") pod \"96e0eeaf-102b-47ad-8f60-02115894de6e\" (UID: \"96e0eeaf-102b-47ad-8f60-02115894de6e\") " Nov 24 01:33:39 crc kubenswrapper[4755]: I1124 01:33:39.905736 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5r5bq\" (UniqueName: \"kubernetes.io/projected/96e0eeaf-102b-47ad-8f60-02115894de6e-kube-api-access-5r5bq\") pod \"96e0eeaf-102b-47ad-8f60-02115894de6e\" (UID: \"96e0eeaf-102b-47ad-8f60-02115894de6e\") " Nov 24 01:33:39 crc kubenswrapper[4755]: I1124 01:33:39.905781 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/96e0eeaf-102b-47ad-8f60-02115894de6e-inventory\") pod \"96e0eeaf-102b-47ad-8f60-02115894de6e\" (UID: \"96e0eeaf-102b-47ad-8f60-02115894de6e\") " Nov 24 01:33:39 crc kubenswrapper[4755]: I1124 01:33:39.905812 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/96e0eeaf-102b-47ad-8f60-02115894de6e-ssh-key\") pod \"96e0eeaf-102b-47ad-8f60-02115894de6e\" (UID: \"96e0eeaf-102b-47ad-8f60-02115894de6e\") " Nov 24 01:33:39 crc kubenswrapper[4755]: I1124 01:33:39.911231 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96e0eeaf-102b-47ad-8f60-02115894de6e-kube-api-access-5r5bq" (OuterVolumeSpecName: "kube-api-access-5r5bq") pod "96e0eeaf-102b-47ad-8f60-02115894de6e" (UID: "96e0eeaf-102b-47ad-8f60-02115894de6e"). InnerVolumeSpecName "kube-api-access-5r5bq". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:33:39 crc kubenswrapper[4755]: I1124 01:33:39.911643 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96e0eeaf-102b-47ad-8f60-02115894de6e-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "96e0eeaf-102b-47ad-8f60-02115894de6e" (UID: "96e0eeaf-102b-47ad-8f60-02115894de6e"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:33:39 crc kubenswrapper[4755]: I1124 01:33:39.931042 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96e0eeaf-102b-47ad-8f60-02115894de6e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "96e0eeaf-102b-47ad-8f60-02115894de6e" (UID: "96e0eeaf-102b-47ad-8f60-02115894de6e"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:33:39 crc kubenswrapper[4755]: I1124 01:33:39.952834 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96e0eeaf-102b-47ad-8f60-02115894de6e-inventory" (OuterVolumeSpecName: "inventory") pod "96e0eeaf-102b-47ad-8f60-02115894de6e" (UID: "96e0eeaf-102b-47ad-8f60-02115894de6e"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:33:40 crc kubenswrapper[4755]: I1124 01:33:40.007517 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5r5bq\" (UniqueName: \"kubernetes.io/projected/96e0eeaf-102b-47ad-8f60-02115894de6e-kube-api-access-5r5bq\") on node \"crc\" DevicePath \"\"" Nov 24 01:33:40 crc kubenswrapper[4755]: I1124 01:33:40.007550 4755 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/96e0eeaf-102b-47ad-8f60-02115894de6e-inventory\") on node \"crc\" DevicePath \"\"" Nov 24 01:33:40 crc kubenswrapper[4755]: I1124 01:33:40.007561 4755 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/96e0eeaf-102b-47ad-8f60-02115894de6e-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 24 01:33:40 crc kubenswrapper[4755]: I1124 01:33:40.007572 4755 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96e0eeaf-102b-47ad-8f60-02115894de6e-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:33:40 crc kubenswrapper[4755]: I1124 01:33:40.420772 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5hnsd" event={"ID":"96e0eeaf-102b-47ad-8f60-02115894de6e","Type":"ContainerDied","Data":"417452fd8c996d25623305a5571e5ee3f5faf0dae42174894751985d04f9e71c"} Nov 24 01:33:40 crc kubenswrapper[4755]: I1124 01:33:40.421181 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="417452fd8c996d25623305a5571e5ee3f5faf0dae42174894751985d04f9e71c" Nov 24 01:33:40 crc kubenswrapper[4755]: I1124 01:33:40.420862 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5hnsd" Nov 24 01:33:40 crc kubenswrapper[4755]: I1124 01:33:40.520368 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-m85m8"] Nov 24 01:33:40 crc kubenswrapper[4755]: E1124 01:33:40.520742 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96e0eeaf-102b-47ad-8f60-02115894de6e" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Nov 24 01:33:40 crc kubenswrapper[4755]: I1124 01:33:40.520760 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="96e0eeaf-102b-47ad-8f60-02115894de6e" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Nov 24 01:33:40 crc kubenswrapper[4755]: I1124 01:33:40.520948 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="96e0eeaf-102b-47ad-8f60-02115894de6e" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Nov 24 01:33:40 crc kubenswrapper[4755]: I1124 01:33:40.521502 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-m85m8" Nov 24 01:33:40 crc kubenswrapper[4755]: I1124 01:33:40.523722 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 24 01:33:40 crc kubenswrapper[4755]: I1124 01:33:40.523973 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Nov 24 01:33:40 crc kubenswrapper[4755]: I1124 01:33:40.524200 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Nov 24 01:33:40 crc kubenswrapper[4755]: I1124 01:33:40.529426 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-4dz5v" Nov 24 01:33:40 crc kubenswrapper[4755]: I1124 01:33:40.540692 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-m85m8"] Nov 24 01:33:40 crc kubenswrapper[4755]: I1124 01:33:40.617003 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ff52144e-9604-44f1-9af6-65f8c9928560-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-m85m8\" (UID: \"ff52144e-9604-44f1-9af6-65f8c9928560\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-m85m8" Nov 24 01:33:40 crc kubenswrapper[4755]: I1124 01:33:40.617038 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ff52144e-9604-44f1-9af6-65f8c9928560-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-m85m8\" (UID: \"ff52144e-9604-44f1-9af6-65f8c9928560\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-m85m8" Nov 24 01:33:40 crc kubenswrapper[4755]: I1124 01:33:40.617183 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qjf8h\" (UniqueName: \"kubernetes.io/projected/ff52144e-9604-44f1-9af6-65f8c9928560-kube-api-access-qjf8h\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-m85m8\" (UID: \"ff52144e-9604-44f1-9af6-65f8c9928560\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-m85m8" Nov 24 01:33:40 crc kubenswrapper[4755]: I1124 01:33:40.718894 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ff52144e-9604-44f1-9af6-65f8c9928560-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-m85m8\" (UID: \"ff52144e-9604-44f1-9af6-65f8c9928560\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-m85m8" Nov 24 01:33:40 crc kubenswrapper[4755]: I1124 01:33:40.718978 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ff52144e-9604-44f1-9af6-65f8c9928560-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-m85m8\" (UID: \"ff52144e-9604-44f1-9af6-65f8c9928560\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-m85m8" Nov 24 01:33:40 crc kubenswrapper[4755]: I1124 01:33:40.719131 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qjf8h\" (UniqueName: \"kubernetes.io/projected/ff52144e-9604-44f1-9af6-65f8c9928560-kube-api-access-qjf8h\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-m85m8\" (UID: \"ff52144e-9604-44f1-9af6-65f8c9928560\") " 
pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-m85m8" Nov 24 01:33:40 crc kubenswrapper[4755]: I1124 01:33:40.723995 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ff52144e-9604-44f1-9af6-65f8c9928560-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-m85m8\" (UID: \"ff52144e-9604-44f1-9af6-65f8c9928560\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-m85m8" Nov 24 01:33:40 crc kubenswrapper[4755]: I1124 01:33:40.726903 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ff52144e-9604-44f1-9af6-65f8c9928560-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-m85m8\" (UID: \"ff52144e-9604-44f1-9af6-65f8c9928560\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-m85m8" Nov 24 01:33:40 crc kubenswrapper[4755]: I1124 01:33:40.749019 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qjf8h\" (UniqueName: \"kubernetes.io/projected/ff52144e-9604-44f1-9af6-65f8c9928560-kube-api-access-qjf8h\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-m85m8\" (UID: \"ff52144e-9604-44f1-9af6-65f8c9928560\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-m85m8" Nov 24 01:33:40 crc kubenswrapper[4755]: I1124 01:33:40.879448 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-m85m8" Nov 24 01:33:41 crc kubenswrapper[4755]: I1124 01:33:41.400126 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-m85m8"] Nov 24 01:33:41 crc kubenswrapper[4755]: I1124 01:33:41.431070 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-m85m8" event={"ID":"ff52144e-9604-44f1-9af6-65f8c9928560","Type":"ContainerStarted","Data":"4193e298f9251851248c2358ade7d2a95f80280fa382f50fb25aa188f11416a3"} Nov 24 01:33:42 crc kubenswrapper[4755]: I1124 01:33:42.445863 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-m85m8" event={"ID":"ff52144e-9604-44f1-9af6-65f8c9928560","Type":"ContainerStarted","Data":"307ce660b44325fba61801d4c4c67ae30ec12a73eb34fe180d7c20c0dae4a455"} Nov 24 01:33:42 crc kubenswrapper[4755]: I1124 01:33:42.475341 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-m85m8" podStartSLOduration=1.9535252029999999 podStartE2EDuration="2.475322242s" podCreationTimestamp="2025-11-24 01:33:40 +0000 UTC" firstStartedPulling="2025-11-24 01:33:41.400023462 +0000 UTC m=+1246.086088963" lastFinishedPulling="2025-11-24 01:33:41.921820461 +0000 UTC m=+1246.607886002" observedRunningTime="2025-11-24 01:33:42.468831721 +0000 UTC m=+1247.154897252" watchObservedRunningTime="2025-11-24 01:33:42.475322242 +0000 UTC m=+1247.161387733" Nov 24 01:33:45 crc kubenswrapper[4755]: I1124 01:33:45.476876 4755 generic.go:334] "Generic (PLEG): container finished" podID="ff52144e-9604-44f1-9af6-65f8c9928560" containerID="307ce660b44325fba61801d4c4c67ae30ec12a73eb34fe180d7c20c0dae4a455" exitCode=0 Nov 24 01:33:45 crc kubenswrapper[4755]: I1124 01:33:45.476974 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-m85m8" 
event={"ID":"ff52144e-9604-44f1-9af6-65f8c9928560","Type":"ContainerDied","Data":"307ce660b44325fba61801d4c4c67ae30ec12a73eb34fe180d7c20c0dae4a455"} Nov 24 01:33:46 crc kubenswrapper[4755]: I1124 01:33:46.950808 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-m85m8" Nov 24 01:33:47 crc kubenswrapper[4755]: I1124 01:33:47.128784 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ff52144e-9604-44f1-9af6-65f8c9928560-inventory\") pod \"ff52144e-9604-44f1-9af6-65f8c9928560\" (UID: \"ff52144e-9604-44f1-9af6-65f8c9928560\") " Nov 24 01:33:47 crc kubenswrapper[4755]: I1124 01:33:47.128908 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qjf8h\" (UniqueName: \"kubernetes.io/projected/ff52144e-9604-44f1-9af6-65f8c9928560-kube-api-access-qjf8h\") pod \"ff52144e-9604-44f1-9af6-65f8c9928560\" (UID: \"ff52144e-9604-44f1-9af6-65f8c9928560\") " Nov 24 01:33:47 crc kubenswrapper[4755]: I1124 01:33:47.129040 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ff52144e-9604-44f1-9af6-65f8c9928560-ssh-key\") pod \"ff52144e-9604-44f1-9af6-65f8c9928560\" (UID: \"ff52144e-9604-44f1-9af6-65f8c9928560\") " Nov 24 01:33:47 crc kubenswrapper[4755]: I1124 01:33:47.134786 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff52144e-9604-44f1-9af6-65f8c9928560-kube-api-access-qjf8h" (OuterVolumeSpecName: "kube-api-access-qjf8h") pod "ff52144e-9604-44f1-9af6-65f8c9928560" (UID: "ff52144e-9604-44f1-9af6-65f8c9928560"). InnerVolumeSpecName "kube-api-access-qjf8h". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:33:47 crc kubenswrapper[4755]: I1124 01:33:47.157351 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff52144e-9604-44f1-9af6-65f8c9928560-inventory" (OuterVolumeSpecName: "inventory") pod "ff52144e-9604-44f1-9af6-65f8c9928560" (UID: "ff52144e-9604-44f1-9af6-65f8c9928560"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:33:47 crc kubenswrapper[4755]: I1124 01:33:47.157489 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff52144e-9604-44f1-9af6-65f8c9928560-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ff52144e-9604-44f1-9af6-65f8c9928560" (UID: "ff52144e-9604-44f1-9af6-65f8c9928560"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:33:47 crc kubenswrapper[4755]: I1124 01:33:47.231667 4755 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ff52144e-9604-44f1-9af6-65f8c9928560-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 24 01:33:47 crc kubenswrapper[4755]: I1124 01:33:47.231698 4755 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ff52144e-9604-44f1-9af6-65f8c9928560-inventory\") on node \"crc\" DevicePath \"\"" Nov 24 01:33:47 crc kubenswrapper[4755]: I1124 01:33:47.231709 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qjf8h\" (UniqueName: \"kubernetes.io/projected/ff52144e-9604-44f1-9af6-65f8c9928560-kube-api-access-qjf8h\") on node \"crc\" DevicePath \"\"" Nov 24 01:33:47 crc kubenswrapper[4755]: I1124 01:33:47.521715 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-m85m8" event={"ID":"ff52144e-9604-44f1-9af6-65f8c9928560","Type":"ContainerDied","Data":"4193e298f9251851248c2358ade7d2a95f80280fa382f50fb25aa188f11416a3"} Nov 24 01:33:47 crc kubenswrapper[4755]: I1124 01:33:47.528966 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4193e298f9251851248c2358ade7d2a95f80280fa382f50fb25aa188f11416a3" Nov 24 01:33:47 crc kubenswrapper[4755]: I1124 01:33:47.522884 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-m85m8" Nov 24 01:33:47 crc kubenswrapper[4755]: I1124 01:33:47.578721 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqnnd"] Nov 24 01:33:47 crc kubenswrapper[4755]: E1124 01:33:47.579439 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff52144e-9604-44f1-9af6-65f8c9928560" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Nov 24 01:33:47 crc kubenswrapper[4755]: I1124 01:33:47.579460 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff52144e-9604-44f1-9af6-65f8c9928560" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Nov 24 01:33:47 crc kubenswrapper[4755]: I1124 01:33:47.579728 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff52144e-9604-44f1-9af6-65f8c9928560" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Nov 24 01:33:47 crc kubenswrapper[4755]: I1124 01:33:47.580467 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqnnd" Nov 24 01:33:47 crc kubenswrapper[4755]: I1124 01:33:47.582475 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Nov 24 01:33:47 crc kubenswrapper[4755]: I1124 01:33:47.582816 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-4dz5v" Nov 24 01:33:47 crc kubenswrapper[4755]: I1124 01:33:47.583101 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Nov 24 01:33:47 crc kubenswrapper[4755]: I1124 01:33:47.583262 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 24 01:33:47 crc kubenswrapper[4755]: I1124 01:33:47.606619 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqnnd"] Nov 24 01:33:47 crc kubenswrapper[4755]: I1124 01:33:47.638908 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1f11949-be37-4a9d-9e73-b0cbc20a6d1e-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-hqnnd\" (UID: \"a1f11949-be37-4a9d-9e73-b0cbc20a6d1e\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqnnd" Nov 24 01:33:47 crc kubenswrapper[4755]: I1124 01:33:47.638953 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4dq7z\" (UniqueName: \"kubernetes.io/projected/a1f11949-be37-4a9d-9e73-b0cbc20a6d1e-kube-api-access-4dq7z\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-hqnnd\" (UID: \"a1f11949-be37-4a9d-9e73-b0cbc20a6d1e\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqnnd" Nov 24 01:33:47 crc kubenswrapper[4755]: I1124 01:33:47.639049 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a1f11949-be37-4a9d-9e73-b0cbc20a6d1e-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-hqnnd\" (UID: \"a1f11949-be37-4a9d-9e73-b0cbc20a6d1e\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqnnd" Nov 24 01:33:47 crc kubenswrapper[4755]: I1124 01:33:47.639074 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a1f11949-be37-4a9d-9e73-b0cbc20a6d1e-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-hqnnd\" (UID: \"a1f11949-be37-4a9d-9e73-b0cbc20a6d1e\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqnnd" Nov 24 01:33:47 crc kubenswrapper[4755]: I1124 01:33:47.740625 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a1f11949-be37-4a9d-9e73-b0cbc20a6d1e-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-hqnnd\" (UID: \"a1f11949-be37-4a9d-9e73-b0cbc20a6d1e\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqnnd" Nov 24 01:33:47 crc kubenswrapper[4755]: I1124 01:33:47.740687 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a1f11949-be37-4a9d-9e73-b0cbc20a6d1e-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-hqnnd\" (UID: 
\"a1f11949-be37-4a9d-9e73-b0cbc20a6d1e\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqnnd" Nov 24 01:33:47 crc kubenswrapper[4755]: I1124 01:33:47.740887 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1f11949-be37-4a9d-9e73-b0cbc20a6d1e-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-hqnnd\" (UID: \"a1f11949-be37-4a9d-9e73-b0cbc20a6d1e\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqnnd" Nov 24 01:33:47 crc kubenswrapper[4755]: I1124 01:33:47.740924 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4dq7z\" (UniqueName: \"kubernetes.io/projected/a1f11949-be37-4a9d-9e73-b0cbc20a6d1e-kube-api-access-4dq7z\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-hqnnd\" (UID: \"a1f11949-be37-4a9d-9e73-b0cbc20a6d1e\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqnnd" Nov 24 01:33:47 crc kubenswrapper[4755]: I1124 01:33:47.746255 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a1f11949-be37-4a9d-9e73-b0cbc20a6d1e-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-hqnnd\" (UID: \"a1f11949-be37-4a9d-9e73-b0cbc20a6d1e\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqnnd" Nov 24 01:33:47 crc kubenswrapper[4755]: I1124 01:33:47.746255 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a1f11949-be37-4a9d-9e73-b0cbc20a6d1e-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-hqnnd\" (UID: \"a1f11949-be37-4a9d-9e73-b0cbc20a6d1e\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqnnd" Nov 24 01:33:47 crc kubenswrapper[4755]: I1124 01:33:47.748648 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1f11949-be37-4a9d-9e73-b0cbc20a6d1e-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-hqnnd\" (UID: \"a1f11949-be37-4a9d-9e73-b0cbc20a6d1e\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqnnd" Nov 24 01:33:47 crc kubenswrapper[4755]: I1124 01:33:47.756202 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4dq7z\" (UniqueName: \"kubernetes.io/projected/a1f11949-be37-4a9d-9e73-b0cbc20a6d1e-kube-api-access-4dq7z\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-hqnnd\" (UID: \"a1f11949-be37-4a9d-9e73-b0cbc20a6d1e\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqnnd" Nov 24 01:33:47 crc kubenswrapper[4755]: I1124 01:33:47.900372 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqnnd" Nov 24 01:33:48 crc kubenswrapper[4755]: W1124 01:33:48.411357 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda1f11949_be37_4a9d_9e73_b0cbc20a6d1e.slice/crio-82eb0f672156115c4579e69187b88e8c459f1b5bc8f65fa92e54295c8a878380 WatchSource:0}: Error finding container 82eb0f672156115c4579e69187b88e8c459f1b5bc8f65fa92e54295c8a878380: Status 404 returned error can't find the container with id 82eb0f672156115c4579e69187b88e8c459f1b5bc8f65fa92e54295c8a878380 Nov 24 01:33:48 crc kubenswrapper[4755]: I1124 01:33:48.412014 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqnnd"] Nov 24 01:33:48 crc kubenswrapper[4755]: I1124 01:33:48.530913 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqnnd" event={"ID":"a1f11949-be37-4a9d-9e73-b0cbc20a6d1e","Type":"ContainerStarted","Data":"82eb0f672156115c4579e69187b88e8c459f1b5bc8f65fa92e54295c8a878380"} Nov 24 01:33:49 crc kubenswrapper[4755]: I1124 01:33:49.539344 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqnnd" event={"ID":"a1f11949-be37-4a9d-9e73-b0cbc20a6d1e","Type":"ContainerStarted","Data":"64657c7f5280a01e20900ba86c75810422421c5fb032f29f74cc15b42b50e1e2"} Nov 24 01:33:49 crc kubenswrapper[4755]: I1124 01:33:49.566059 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqnnd" podStartSLOduration=2.143836387 podStartE2EDuration="2.566038275s" podCreationTimestamp="2025-11-24 01:33:47 +0000 UTC" firstStartedPulling="2025-11-24 01:33:48.414988248 +0000 UTC m=+1253.101053749" lastFinishedPulling="2025-11-24 01:33:48.837190126 +0000 UTC m=+1253.523255637" observedRunningTime="2025-11-24 01:33:49.559340129 +0000 UTC m=+1254.245405670" watchObservedRunningTime="2025-11-24 01:33:49.566038275 +0000 UTC m=+1254.252103776" Nov 24 01:34:33 crc kubenswrapper[4755]: I1124 01:34:33.294878 4755 patch_prober.go:28] interesting pod/machine-config-daemon-h8xzm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 01:34:33 crc kubenswrapper[4755]: I1124 01:34:33.295310 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 01:35:01 crc kubenswrapper[4755]: I1124 01:35:01.616431 4755 scope.go:117] "RemoveContainer" containerID="8148e6250c00a97e6011644b1457a767cd2a682ac0406bba9dbb0da688fde8ae" Nov 24 01:35:01 crc kubenswrapper[4755]: I1124 01:35:01.669718 4755 scope.go:117] "RemoveContainer" containerID="0fd999277818271c6f1cd4138ef94919ae3efab382f7c50cf77c5f04729957de" Nov 24 01:35:01 crc kubenswrapper[4755]: I1124 01:35:01.717392 4755 scope.go:117] "RemoveContainer" containerID="53b4f57f1b325411337fe5690099efb35761bc35ef40ea8dfb11e204f708a43a" Nov 24 01:35:03 crc kubenswrapper[4755]: I1124 01:35:03.295851 4755 patch_prober.go:28] interesting 
pod/machine-config-daemon-h8xzm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 01:35:03 crc kubenswrapper[4755]: I1124 01:35:03.296300 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 01:35:33 crc kubenswrapper[4755]: I1124 01:35:33.295532 4755 patch_prober.go:28] interesting pod/machine-config-daemon-h8xzm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 01:35:33 crc kubenswrapper[4755]: I1124 01:35:33.296283 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 01:35:33 crc kubenswrapper[4755]: I1124 01:35:33.296323 4755 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" Nov 24 01:35:33 crc kubenswrapper[4755]: I1124 01:35:33.297035 4755 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6046690ccd6cfb15aff12bbb82767c5a83c9c84c292122438b2c2722b65a5466"} pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 24 01:35:33 crc kubenswrapper[4755]: I1124 01:35:33.297088 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" containerID="cri-o://6046690ccd6cfb15aff12bbb82767c5a83c9c84c292122438b2c2722b65a5466" gracePeriod=600 Nov 24 01:35:33 crc kubenswrapper[4755]: I1124 01:35:33.629464 4755 generic.go:334] "Generic (PLEG): container finished" podID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerID="6046690ccd6cfb15aff12bbb82767c5a83c9c84c292122438b2c2722b65a5466" exitCode=0 Nov 24 01:35:33 crc kubenswrapper[4755]: I1124 01:35:33.629596 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" event={"ID":"b1962128-02a0-46c3-82c2-5055c2aed0b9","Type":"ContainerDied","Data":"6046690ccd6cfb15aff12bbb82767c5a83c9c84c292122438b2c2722b65a5466"} Nov 24 01:35:33 crc kubenswrapper[4755]: I1124 01:35:33.629846 4755 scope.go:117] "RemoveContainer" containerID="77857823e666ac5615c021b67cb4fcc6f558c850cc69c4d388ccf77b95626fc7" Nov 24 01:35:34 crc kubenswrapper[4755]: I1124 01:35:34.642649 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" event={"ID":"b1962128-02a0-46c3-82c2-5055c2aed0b9","Type":"ContainerStarted","Data":"8c75477967bfd17f465845e9ad9d8e30c5874427ba92c3a82e0868aae5f07ae3"} Nov 24 01:36:00 
crc kubenswrapper[4755]: I1124 01:36:00.336991 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-c4k9m"] Nov 24 01:36:00 crc kubenswrapper[4755]: I1124 01:36:00.339511 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-c4k9m" Nov 24 01:36:00 crc kubenswrapper[4755]: I1124 01:36:00.362304 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-c4k9m"] Nov 24 01:36:00 crc kubenswrapper[4755]: I1124 01:36:00.425774 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65vpb\" (UniqueName: \"kubernetes.io/projected/cc9db566-3948-49ef-8b52-55372468755a-kube-api-access-65vpb\") pod \"redhat-operators-c4k9m\" (UID: \"cc9db566-3948-49ef-8b52-55372468755a\") " pod="openshift-marketplace/redhat-operators-c4k9m" Nov 24 01:36:00 crc kubenswrapper[4755]: I1124 01:36:00.425866 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc9db566-3948-49ef-8b52-55372468755a-catalog-content\") pod \"redhat-operators-c4k9m\" (UID: \"cc9db566-3948-49ef-8b52-55372468755a\") " pod="openshift-marketplace/redhat-operators-c4k9m" Nov 24 01:36:00 crc kubenswrapper[4755]: I1124 01:36:00.426097 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc9db566-3948-49ef-8b52-55372468755a-utilities\") pod \"redhat-operators-c4k9m\" (UID: \"cc9db566-3948-49ef-8b52-55372468755a\") " pod="openshift-marketplace/redhat-operators-c4k9m" Nov 24 01:36:00 crc kubenswrapper[4755]: I1124 01:36:00.527801 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc9db566-3948-49ef-8b52-55372468755a-utilities\") pod \"redhat-operators-c4k9m\" (UID: \"cc9db566-3948-49ef-8b52-55372468755a\") " pod="openshift-marketplace/redhat-operators-c4k9m" Nov 24 01:36:00 crc kubenswrapper[4755]: I1124 01:36:00.527976 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65vpb\" (UniqueName: \"kubernetes.io/projected/cc9db566-3948-49ef-8b52-55372468755a-kube-api-access-65vpb\") pod \"redhat-operators-c4k9m\" (UID: \"cc9db566-3948-49ef-8b52-55372468755a\") " pod="openshift-marketplace/redhat-operators-c4k9m" Nov 24 01:36:00 crc kubenswrapper[4755]: I1124 01:36:00.528008 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc9db566-3948-49ef-8b52-55372468755a-catalog-content\") pod \"redhat-operators-c4k9m\" (UID: \"cc9db566-3948-49ef-8b52-55372468755a\") " pod="openshift-marketplace/redhat-operators-c4k9m" Nov 24 01:36:00 crc kubenswrapper[4755]: I1124 01:36:00.528479 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc9db566-3948-49ef-8b52-55372468755a-catalog-content\") pod \"redhat-operators-c4k9m\" (UID: \"cc9db566-3948-49ef-8b52-55372468755a\") " pod="openshift-marketplace/redhat-operators-c4k9m" Nov 24 01:36:00 crc kubenswrapper[4755]: I1124 01:36:00.528551 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc9db566-3948-49ef-8b52-55372468755a-utilities\") pod 
\"redhat-operators-c4k9m\" (UID: \"cc9db566-3948-49ef-8b52-55372468755a\") " pod="openshift-marketplace/redhat-operators-c4k9m" Nov 24 01:36:00 crc kubenswrapper[4755]: I1124 01:36:00.547328 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-65vpb\" (UniqueName: \"kubernetes.io/projected/cc9db566-3948-49ef-8b52-55372468755a-kube-api-access-65vpb\") pod \"redhat-operators-c4k9m\" (UID: \"cc9db566-3948-49ef-8b52-55372468755a\") " pod="openshift-marketplace/redhat-operators-c4k9m" Nov 24 01:36:00 crc kubenswrapper[4755]: I1124 01:36:00.709508 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-c4k9m" Nov 24 01:36:01 crc kubenswrapper[4755]: I1124 01:36:01.189157 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-c4k9m"] Nov 24 01:36:01 crc kubenswrapper[4755]: I1124 01:36:01.853516 4755 scope.go:117] "RemoveContainer" containerID="143f346afc30989f3a1a8234b200dbb402a93e3e31e8619f3ba263f982114bad" Nov 24 01:36:01 crc kubenswrapper[4755]: I1124 01:36:01.898364 4755 scope.go:117] "RemoveContainer" containerID="94d632be6382cb5836e89b418047b89ca951ebfa9566582dfdc70d9595841609" Nov 24 01:36:01 crc kubenswrapper[4755]: I1124 01:36:01.917324 4755 generic.go:334] "Generic (PLEG): container finished" podID="cc9db566-3948-49ef-8b52-55372468755a" containerID="5faac082e19ca16012cdc4525f957d41992fac1e5bcdddb742a354bef7641a40" exitCode=0 Nov 24 01:36:01 crc kubenswrapper[4755]: I1124 01:36:01.917472 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c4k9m" event={"ID":"cc9db566-3948-49ef-8b52-55372468755a","Type":"ContainerDied","Data":"5faac082e19ca16012cdc4525f957d41992fac1e5bcdddb742a354bef7641a40"} Nov 24 01:36:01 crc kubenswrapper[4755]: I1124 01:36:01.917586 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c4k9m" event={"ID":"cc9db566-3948-49ef-8b52-55372468755a","Type":"ContainerStarted","Data":"49c302508861b1c3ae2a3f071c5e953a1f1b15e1e55157f8590913410f3735c7"} Nov 24 01:36:03 crc kubenswrapper[4755]: I1124 01:36:03.942476 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c4k9m" event={"ID":"cc9db566-3948-49ef-8b52-55372468755a","Type":"ContainerStarted","Data":"8db9473005673f1b4533039438c836018da960f7b490f692d6dc43fc2a7270e1"} Nov 24 01:36:04 crc kubenswrapper[4755]: I1124 01:36:04.966753 4755 generic.go:334] "Generic (PLEG): container finished" podID="cc9db566-3948-49ef-8b52-55372468755a" containerID="8db9473005673f1b4533039438c836018da960f7b490f692d6dc43fc2a7270e1" exitCode=0 Nov 24 01:36:04 crc kubenswrapper[4755]: I1124 01:36:04.966840 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c4k9m" event={"ID":"cc9db566-3948-49ef-8b52-55372468755a","Type":"ContainerDied","Data":"8db9473005673f1b4533039438c836018da960f7b490f692d6dc43fc2a7270e1"} Nov 24 01:36:05 crc kubenswrapper[4755]: I1124 01:36:05.983416 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c4k9m" event={"ID":"cc9db566-3948-49ef-8b52-55372468755a","Type":"ContainerStarted","Data":"54915e79f21a912481e21b1731ee337c03f2284a9b97004b6bc2a04ef0f93d4e"} Nov 24 01:36:06 crc kubenswrapper[4755]: I1124 01:36:06.017313 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-c4k9m" 
podStartSLOduration=2.542001591 podStartE2EDuration="6.017277923s" podCreationTimestamp="2025-11-24 01:36:00 +0000 UTC" firstStartedPulling="2025-11-24 01:36:01.920146878 +0000 UTC m=+1386.606212389" lastFinishedPulling="2025-11-24 01:36:05.39542321 +0000 UTC m=+1390.081488721" observedRunningTime="2025-11-24 01:36:05.999249198 +0000 UTC m=+1390.685314720" watchObservedRunningTime="2025-11-24 01:36:06.017277923 +0000 UTC m=+1390.703343464" Nov 24 01:36:10 crc kubenswrapper[4755]: I1124 01:36:10.711158 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-c4k9m" Nov 24 01:36:10 crc kubenswrapper[4755]: I1124 01:36:10.711907 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-c4k9m" Nov 24 01:36:11 crc kubenswrapper[4755]: I1124 01:36:11.770617 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-c4k9m" podUID="cc9db566-3948-49ef-8b52-55372468755a" containerName="registry-server" probeResult="failure" output=< Nov 24 01:36:11 crc kubenswrapper[4755]: timeout: failed to connect service ":50051" within 1s Nov 24 01:36:11 crc kubenswrapper[4755]: > Nov 24 01:36:20 crc kubenswrapper[4755]: I1124 01:36:20.776286 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-c4k9m" Nov 24 01:36:20 crc kubenswrapper[4755]: I1124 01:36:20.828985 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-c4k9m" Nov 24 01:36:21 crc kubenswrapper[4755]: I1124 01:36:21.105391 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-c4k9m"] Nov 24 01:36:22 crc kubenswrapper[4755]: I1124 01:36:22.146256 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-c4k9m" podUID="cc9db566-3948-49ef-8b52-55372468755a" containerName="registry-server" containerID="cri-o://54915e79f21a912481e21b1731ee337c03f2284a9b97004b6bc2a04ef0f93d4e" gracePeriod=2 Nov 24 01:36:22 crc kubenswrapper[4755]: I1124 01:36:22.616765 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-c4k9m" Nov 24 01:36:22 crc kubenswrapper[4755]: I1124 01:36:22.760397 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-65vpb\" (UniqueName: \"kubernetes.io/projected/cc9db566-3948-49ef-8b52-55372468755a-kube-api-access-65vpb\") pod \"cc9db566-3948-49ef-8b52-55372468755a\" (UID: \"cc9db566-3948-49ef-8b52-55372468755a\") " Nov 24 01:36:22 crc kubenswrapper[4755]: I1124 01:36:22.760679 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc9db566-3948-49ef-8b52-55372468755a-catalog-content\") pod \"cc9db566-3948-49ef-8b52-55372468755a\" (UID: \"cc9db566-3948-49ef-8b52-55372468755a\") " Nov 24 01:36:22 crc kubenswrapper[4755]: I1124 01:36:22.760839 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc9db566-3948-49ef-8b52-55372468755a-utilities\") pod \"cc9db566-3948-49ef-8b52-55372468755a\" (UID: \"cc9db566-3948-49ef-8b52-55372468755a\") " Nov 24 01:36:22 crc kubenswrapper[4755]: I1124 01:36:22.761439 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cc9db566-3948-49ef-8b52-55372468755a-utilities" (OuterVolumeSpecName: "utilities") pod "cc9db566-3948-49ef-8b52-55372468755a" (UID: "cc9db566-3948-49ef-8b52-55372468755a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:36:22 crc kubenswrapper[4755]: I1124 01:36:22.761578 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc9db566-3948-49ef-8b52-55372468755a-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 01:36:22 crc kubenswrapper[4755]: I1124 01:36:22.769340 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc9db566-3948-49ef-8b52-55372468755a-kube-api-access-65vpb" (OuterVolumeSpecName: "kube-api-access-65vpb") pod "cc9db566-3948-49ef-8b52-55372468755a" (UID: "cc9db566-3948-49ef-8b52-55372468755a"). InnerVolumeSpecName "kube-api-access-65vpb". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:36:22 crc kubenswrapper[4755]: I1124 01:36:22.864138 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-65vpb\" (UniqueName: \"kubernetes.io/projected/cc9db566-3948-49ef-8b52-55372468755a-kube-api-access-65vpb\") on node \"crc\" DevicePath \"\"" Nov 24 01:36:22 crc kubenswrapper[4755]: I1124 01:36:22.870207 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cc9db566-3948-49ef-8b52-55372468755a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cc9db566-3948-49ef-8b52-55372468755a" (UID: "cc9db566-3948-49ef-8b52-55372468755a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:36:22 crc kubenswrapper[4755]: I1124 01:36:22.966177 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc9db566-3948-49ef-8b52-55372468755a-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 01:36:23 crc kubenswrapper[4755]: I1124 01:36:23.157073 4755 generic.go:334] "Generic (PLEG): container finished" podID="cc9db566-3948-49ef-8b52-55372468755a" containerID="54915e79f21a912481e21b1731ee337c03f2284a9b97004b6bc2a04ef0f93d4e" exitCode=0 Nov 24 01:36:23 crc kubenswrapper[4755]: I1124 01:36:23.157121 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c4k9m" event={"ID":"cc9db566-3948-49ef-8b52-55372468755a","Type":"ContainerDied","Data":"54915e79f21a912481e21b1731ee337c03f2284a9b97004b6bc2a04ef0f93d4e"} Nov 24 01:36:23 crc kubenswrapper[4755]: I1124 01:36:23.157134 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-c4k9m" Nov 24 01:36:23 crc kubenswrapper[4755]: I1124 01:36:23.157152 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c4k9m" event={"ID":"cc9db566-3948-49ef-8b52-55372468755a","Type":"ContainerDied","Data":"49c302508861b1c3ae2a3f071c5e953a1f1b15e1e55157f8590913410f3735c7"} Nov 24 01:36:23 crc kubenswrapper[4755]: I1124 01:36:23.157176 4755 scope.go:117] "RemoveContainer" containerID="54915e79f21a912481e21b1731ee337c03f2284a9b97004b6bc2a04ef0f93d4e" Nov 24 01:36:23 crc kubenswrapper[4755]: I1124 01:36:23.178427 4755 scope.go:117] "RemoveContainer" containerID="8db9473005673f1b4533039438c836018da960f7b490f692d6dc43fc2a7270e1" Nov 24 01:36:23 crc kubenswrapper[4755]: I1124 01:36:23.190071 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-c4k9m"] Nov 24 01:36:23 crc kubenswrapper[4755]: I1124 01:36:23.198935 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-c4k9m"] Nov 24 01:36:23 crc kubenswrapper[4755]: I1124 01:36:23.216953 4755 scope.go:117] "RemoveContainer" containerID="5faac082e19ca16012cdc4525f957d41992fac1e5bcdddb742a354bef7641a40" Nov 24 01:36:23 crc kubenswrapper[4755]: I1124 01:36:23.246786 4755 scope.go:117] "RemoveContainer" containerID="54915e79f21a912481e21b1731ee337c03f2284a9b97004b6bc2a04ef0f93d4e" Nov 24 01:36:23 crc kubenswrapper[4755]: E1124 01:36:23.247086 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"54915e79f21a912481e21b1731ee337c03f2284a9b97004b6bc2a04ef0f93d4e\": container with ID starting with 54915e79f21a912481e21b1731ee337c03f2284a9b97004b6bc2a04ef0f93d4e not found: ID does not exist" containerID="54915e79f21a912481e21b1731ee337c03f2284a9b97004b6bc2a04ef0f93d4e" Nov 24 01:36:23 crc kubenswrapper[4755]: I1124 01:36:23.247115 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54915e79f21a912481e21b1731ee337c03f2284a9b97004b6bc2a04ef0f93d4e"} err="failed to get container status \"54915e79f21a912481e21b1731ee337c03f2284a9b97004b6bc2a04ef0f93d4e\": rpc error: code = NotFound desc = could not find container \"54915e79f21a912481e21b1731ee337c03f2284a9b97004b6bc2a04ef0f93d4e\": container with ID starting with 54915e79f21a912481e21b1731ee337c03f2284a9b97004b6bc2a04ef0f93d4e not found: ID does not exist" Nov 24 01:36:23 crc 
kubenswrapper[4755]: I1124 01:36:23.247135 4755 scope.go:117] "RemoveContainer" containerID="8db9473005673f1b4533039438c836018da960f7b490f692d6dc43fc2a7270e1" Nov 24 01:36:23 crc kubenswrapper[4755]: E1124 01:36:23.247430 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8db9473005673f1b4533039438c836018da960f7b490f692d6dc43fc2a7270e1\": container with ID starting with 8db9473005673f1b4533039438c836018da960f7b490f692d6dc43fc2a7270e1 not found: ID does not exist" containerID="8db9473005673f1b4533039438c836018da960f7b490f692d6dc43fc2a7270e1" Nov 24 01:36:23 crc kubenswrapper[4755]: I1124 01:36:23.247485 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8db9473005673f1b4533039438c836018da960f7b490f692d6dc43fc2a7270e1"} err="failed to get container status \"8db9473005673f1b4533039438c836018da960f7b490f692d6dc43fc2a7270e1\": rpc error: code = NotFound desc = could not find container \"8db9473005673f1b4533039438c836018da960f7b490f692d6dc43fc2a7270e1\": container with ID starting with 8db9473005673f1b4533039438c836018da960f7b490f692d6dc43fc2a7270e1 not found: ID does not exist" Nov 24 01:36:23 crc kubenswrapper[4755]: I1124 01:36:23.247522 4755 scope.go:117] "RemoveContainer" containerID="5faac082e19ca16012cdc4525f957d41992fac1e5bcdddb742a354bef7641a40" Nov 24 01:36:23 crc kubenswrapper[4755]: E1124 01:36:23.247896 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5faac082e19ca16012cdc4525f957d41992fac1e5bcdddb742a354bef7641a40\": container with ID starting with 5faac082e19ca16012cdc4525f957d41992fac1e5bcdddb742a354bef7641a40 not found: ID does not exist" containerID="5faac082e19ca16012cdc4525f957d41992fac1e5bcdddb742a354bef7641a40" Nov 24 01:36:23 crc kubenswrapper[4755]: I1124 01:36:23.247928 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5faac082e19ca16012cdc4525f957d41992fac1e5bcdddb742a354bef7641a40"} err="failed to get container status \"5faac082e19ca16012cdc4525f957d41992fac1e5bcdddb742a354bef7641a40\": rpc error: code = NotFound desc = could not find container \"5faac082e19ca16012cdc4525f957d41992fac1e5bcdddb742a354bef7641a40\": container with ID starting with 5faac082e19ca16012cdc4525f957d41992fac1e5bcdddb742a354bef7641a40 not found: ID does not exist" Nov 24 01:36:24 crc kubenswrapper[4755]: I1124 01:36:24.005767 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cc9db566-3948-49ef-8b52-55372468755a" path="/var/lib/kubelet/pods/cc9db566-3948-49ef-8b52-55372468755a/volumes" Nov 24 01:36:24 crc kubenswrapper[4755]: I1124 01:36:24.023991 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-nvn2q"] Nov 24 01:36:24 crc kubenswrapper[4755]: E1124 01:36:24.024431 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc9db566-3948-49ef-8b52-55372468755a" containerName="registry-server" Nov 24 01:36:24 crc kubenswrapper[4755]: I1124 01:36:24.024449 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc9db566-3948-49ef-8b52-55372468755a" containerName="registry-server" Nov 24 01:36:24 crc kubenswrapper[4755]: E1124 01:36:24.024480 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc9db566-3948-49ef-8b52-55372468755a" containerName="extract-utilities" Nov 24 01:36:24 crc kubenswrapper[4755]: I1124 01:36:24.024487 4755 
state_mem.go:107] "Deleted CPUSet assignment" podUID="cc9db566-3948-49ef-8b52-55372468755a" containerName="extract-utilities" Nov 24 01:36:24 crc kubenswrapper[4755]: E1124 01:36:24.024538 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc9db566-3948-49ef-8b52-55372468755a" containerName="extract-content" Nov 24 01:36:24 crc kubenswrapper[4755]: I1124 01:36:24.024562 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc9db566-3948-49ef-8b52-55372468755a" containerName="extract-content" Nov 24 01:36:24 crc kubenswrapper[4755]: I1124 01:36:24.024810 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc9db566-3948-49ef-8b52-55372468755a" containerName="registry-server" Nov 24 01:36:24 crc kubenswrapper[4755]: I1124 01:36:24.026176 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nvn2q" Nov 24 01:36:24 crc kubenswrapper[4755]: I1124 01:36:24.032977 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nvn2q"] Nov 24 01:36:24 crc kubenswrapper[4755]: I1124 01:36:24.094371 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2bbb66c4-2f51-4963-afc0-404379ad531c-utilities\") pod \"redhat-marketplace-nvn2q\" (UID: \"2bbb66c4-2f51-4963-afc0-404379ad531c\") " pod="openshift-marketplace/redhat-marketplace-nvn2q" Nov 24 01:36:24 crc kubenswrapper[4755]: I1124 01:36:24.094427 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2bbb66c4-2f51-4963-afc0-404379ad531c-catalog-content\") pod \"redhat-marketplace-nvn2q\" (UID: \"2bbb66c4-2f51-4963-afc0-404379ad531c\") " pod="openshift-marketplace/redhat-marketplace-nvn2q" Nov 24 01:36:24 crc kubenswrapper[4755]: I1124 01:36:24.094917 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zl2p7\" (UniqueName: \"kubernetes.io/projected/2bbb66c4-2f51-4963-afc0-404379ad531c-kube-api-access-zl2p7\") pod \"redhat-marketplace-nvn2q\" (UID: \"2bbb66c4-2f51-4963-afc0-404379ad531c\") " pod="openshift-marketplace/redhat-marketplace-nvn2q" Nov 24 01:36:24 crc kubenswrapper[4755]: I1124 01:36:24.196768 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zl2p7\" (UniqueName: \"kubernetes.io/projected/2bbb66c4-2f51-4963-afc0-404379ad531c-kube-api-access-zl2p7\") pod \"redhat-marketplace-nvn2q\" (UID: \"2bbb66c4-2f51-4963-afc0-404379ad531c\") " pod="openshift-marketplace/redhat-marketplace-nvn2q" Nov 24 01:36:24 crc kubenswrapper[4755]: I1124 01:36:24.196904 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2bbb66c4-2f51-4963-afc0-404379ad531c-utilities\") pod \"redhat-marketplace-nvn2q\" (UID: \"2bbb66c4-2f51-4963-afc0-404379ad531c\") " pod="openshift-marketplace/redhat-marketplace-nvn2q" Nov 24 01:36:24 crc kubenswrapper[4755]: I1124 01:36:24.196934 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2bbb66c4-2f51-4963-afc0-404379ad531c-catalog-content\") pod \"redhat-marketplace-nvn2q\" (UID: \"2bbb66c4-2f51-4963-afc0-404379ad531c\") " pod="openshift-marketplace/redhat-marketplace-nvn2q" Nov 24 01:36:24 crc 
kubenswrapper[4755]: I1124 01:36:24.197495 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2bbb66c4-2f51-4963-afc0-404379ad531c-utilities\") pod \"redhat-marketplace-nvn2q\" (UID: \"2bbb66c4-2f51-4963-afc0-404379ad531c\") " pod="openshift-marketplace/redhat-marketplace-nvn2q" Nov 24 01:36:24 crc kubenswrapper[4755]: I1124 01:36:24.197528 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2bbb66c4-2f51-4963-afc0-404379ad531c-catalog-content\") pod \"redhat-marketplace-nvn2q\" (UID: \"2bbb66c4-2f51-4963-afc0-404379ad531c\") " pod="openshift-marketplace/redhat-marketplace-nvn2q" Nov 24 01:36:24 crc kubenswrapper[4755]: I1124 01:36:24.220493 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zl2p7\" (UniqueName: \"kubernetes.io/projected/2bbb66c4-2f51-4963-afc0-404379ad531c-kube-api-access-zl2p7\") pod \"redhat-marketplace-nvn2q\" (UID: \"2bbb66c4-2f51-4963-afc0-404379ad531c\") " pod="openshift-marketplace/redhat-marketplace-nvn2q" Nov 24 01:36:24 crc kubenswrapper[4755]: I1124 01:36:24.345984 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nvn2q" Nov 24 01:36:25 crc kubenswrapper[4755]: I1124 01:36:24.804000 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nvn2q"] Nov 24 01:36:25 crc kubenswrapper[4755]: I1124 01:36:25.178263 4755 generic.go:334] "Generic (PLEG): container finished" podID="2bbb66c4-2f51-4963-afc0-404379ad531c" containerID="d2c855cf79f258b6cdb2b41eda5013154e506c81894b9db375c03804c592460f" exitCode=0 Nov 24 01:36:25 crc kubenswrapper[4755]: I1124 01:36:25.178355 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nvn2q" event={"ID":"2bbb66c4-2f51-4963-afc0-404379ad531c","Type":"ContainerDied","Data":"d2c855cf79f258b6cdb2b41eda5013154e506c81894b9db375c03804c592460f"} Nov 24 01:36:25 crc kubenswrapper[4755]: I1124 01:36:25.178541 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nvn2q" event={"ID":"2bbb66c4-2f51-4963-afc0-404379ad531c","Type":"ContainerStarted","Data":"5d472a6b617dd3fb8cd0d7ac7bca80fe5338be740364dc1347ca2093405cab07"} Nov 24 01:36:27 crc kubenswrapper[4755]: I1124 01:36:27.198760 4755 generic.go:334] "Generic (PLEG): container finished" podID="2bbb66c4-2f51-4963-afc0-404379ad531c" containerID="5718db8899064e055a86cf692ddc3553eb0ac26fa093b54c378178af4d4eb2c7" exitCode=0 Nov 24 01:36:27 crc kubenswrapper[4755]: I1124 01:36:27.198874 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nvn2q" event={"ID":"2bbb66c4-2f51-4963-afc0-404379ad531c","Type":"ContainerDied","Data":"5718db8899064e055a86cf692ddc3553eb0ac26fa093b54c378178af4d4eb2c7"} Nov 24 01:36:28 crc kubenswrapper[4755]: I1124 01:36:28.213175 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nvn2q" event={"ID":"2bbb66c4-2f51-4963-afc0-404379ad531c","Type":"ContainerStarted","Data":"8e8e38bef4c3fd0c2c69eb5f0743c9e8ecc1409db4e4b4e47636b5778a5fa88c"} Nov 24 01:36:28 crc kubenswrapper[4755]: I1124 01:36:28.235144 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-nvn2q" podStartSLOduration=1.8161467089999999 
podStartE2EDuration="4.235121758s" podCreationTimestamp="2025-11-24 01:36:24 +0000 UTC" firstStartedPulling="2025-11-24 01:36:25.180694244 +0000 UTC m=+1409.866759745" lastFinishedPulling="2025-11-24 01:36:27.599669293 +0000 UTC m=+1412.285734794" observedRunningTime="2025-11-24 01:36:28.233735879 +0000 UTC m=+1412.919801380" watchObservedRunningTime="2025-11-24 01:36:28.235121758 +0000 UTC m=+1412.921187259" Nov 24 01:36:32 crc kubenswrapper[4755]: I1124 01:36:32.423324 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-9mldg"] Nov 24 01:36:32 crc kubenswrapper[4755]: I1124 01:36:32.428481 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9mldg" Nov 24 01:36:32 crc kubenswrapper[4755]: I1124 01:36:32.440849 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9mldg"] Nov 24 01:36:32 crc kubenswrapper[4755]: I1124 01:36:32.460056 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vkdpn\" (UniqueName: \"kubernetes.io/projected/6461d6fb-cbe5-4035-9414-10bb3abe8bdd-kube-api-access-vkdpn\") pod \"certified-operators-9mldg\" (UID: \"6461d6fb-cbe5-4035-9414-10bb3abe8bdd\") " pod="openshift-marketplace/certified-operators-9mldg" Nov 24 01:36:32 crc kubenswrapper[4755]: I1124 01:36:32.460227 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6461d6fb-cbe5-4035-9414-10bb3abe8bdd-catalog-content\") pod \"certified-operators-9mldg\" (UID: \"6461d6fb-cbe5-4035-9414-10bb3abe8bdd\") " pod="openshift-marketplace/certified-operators-9mldg" Nov 24 01:36:32 crc kubenswrapper[4755]: I1124 01:36:32.460273 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6461d6fb-cbe5-4035-9414-10bb3abe8bdd-utilities\") pod \"certified-operators-9mldg\" (UID: \"6461d6fb-cbe5-4035-9414-10bb3abe8bdd\") " pod="openshift-marketplace/certified-operators-9mldg" Nov 24 01:36:32 crc kubenswrapper[4755]: I1124 01:36:32.561832 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vkdpn\" (UniqueName: \"kubernetes.io/projected/6461d6fb-cbe5-4035-9414-10bb3abe8bdd-kube-api-access-vkdpn\") pod \"certified-operators-9mldg\" (UID: \"6461d6fb-cbe5-4035-9414-10bb3abe8bdd\") " pod="openshift-marketplace/certified-operators-9mldg" Nov 24 01:36:32 crc kubenswrapper[4755]: I1124 01:36:32.561993 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6461d6fb-cbe5-4035-9414-10bb3abe8bdd-catalog-content\") pod \"certified-operators-9mldg\" (UID: \"6461d6fb-cbe5-4035-9414-10bb3abe8bdd\") " pod="openshift-marketplace/certified-operators-9mldg" Nov 24 01:36:32 crc kubenswrapper[4755]: I1124 01:36:32.562065 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6461d6fb-cbe5-4035-9414-10bb3abe8bdd-utilities\") pod \"certified-operators-9mldg\" (UID: \"6461d6fb-cbe5-4035-9414-10bb3abe8bdd\") " pod="openshift-marketplace/certified-operators-9mldg" Nov 24 01:36:32 crc kubenswrapper[4755]: I1124 01:36:32.562535 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/6461d6fb-cbe5-4035-9414-10bb3abe8bdd-catalog-content\") pod \"certified-operators-9mldg\" (UID: \"6461d6fb-cbe5-4035-9414-10bb3abe8bdd\") " pod="openshift-marketplace/certified-operators-9mldg" Nov 24 01:36:32 crc kubenswrapper[4755]: I1124 01:36:32.562596 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6461d6fb-cbe5-4035-9414-10bb3abe8bdd-utilities\") pod \"certified-operators-9mldg\" (UID: \"6461d6fb-cbe5-4035-9414-10bb3abe8bdd\") " pod="openshift-marketplace/certified-operators-9mldg" Nov 24 01:36:32 crc kubenswrapper[4755]: I1124 01:36:32.582130 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vkdpn\" (UniqueName: \"kubernetes.io/projected/6461d6fb-cbe5-4035-9414-10bb3abe8bdd-kube-api-access-vkdpn\") pod \"certified-operators-9mldg\" (UID: \"6461d6fb-cbe5-4035-9414-10bb3abe8bdd\") " pod="openshift-marketplace/certified-operators-9mldg" Nov 24 01:36:32 crc kubenswrapper[4755]: I1124 01:36:32.747997 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9mldg" Nov 24 01:36:33 crc kubenswrapper[4755]: I1124 01:36:33.064979 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9mldg"] Nov 24 01:36:33 crc kubenswrapper[4755]: I1124 01:36:33.258363 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9mldg" event={"ID":"6461d6fb-cbe5-4035-9414-10bb3abe8bdd","Type":"ContainerStarted","Data":"654789da65c0252ddf05d55beb2f134518f9abd5beeb11eb085275e723427ddf"} Nov 24 01:36:34 crc kubenswrapper[4755]: I1124 01:36:34.269840 4755 generic.go:334] "Generic (PLEG): container finished" podID="6461d6fb-cbe5-4035-9414-10bb3abe8bdd" containerID="f317875a5983187672e48fb0fa191796575facd4af1c6731eb628a82a908ec34" exitCode=0 Nov 24 01:36:34 crc kubenswrapper[4755]: I1124 01:36:34.269917 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9mldg" event={"ID":"6461d6fb-cbe5-4035-9414-10bb3abe8bdd","Type":"ContainerDied","Data":"f317875a5983187672e48fb0fa191796575facd4af1c6731eb628a82a908ec34"} Nov 24 01:36:34 crc kubenswrapper[4755]: I1124 01:36:34.346557 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-nvn2q" Nov 24 01:36:34 crc kubenswrapper[4755]: I1124 01:36:34.346671 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-nvn2q" Nov 24 01:36:34 crc kubenswrapper[4755]: I1124 01:36:34.390843 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-nvn2q" Nov 24 01:36:35 crc kubenswrapper[4755]: I1124 01:36:35.282784 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9mldg" event={"ID":"6461d6fb-cbe5-4035-9414-10bb3abe8bdd","Type":"ContainerStarted","Data":"0a15a6684da1029ccab43cb3385fe55b0ccdbbcdb07e7845e66b881374c89f00"} Nov 24 01:36:35 crc kubenswrapper[4755]: I1124 01:36:35.333624 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-nvn2q" Nov 24 01:36:36 crc kubenswrapper[4755]: I1124 01:36:36.296832 4755 generic.go:334] "Generic (PLEG): container finished" podID="6461d6fb-cbe5-4035-9414-10bb3abe8bdd" 
containerID="0a15a6684da1029ccab43cb3385fe55b0ccdbbcdb07e7845e66b881374c89f00" exitCode=0 Nov 24 01:36:36 crc kubenswrapper[4755]: I1124 01:36:36.296892 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9mldg" event={"ID":"6461d6fb-cbe5-4035-9414-10bb3abe8bdd","Type":"ContainerDied","Data":"0a15a6684da1029ccab43cb3385fe55b0ccdbbcdb07e7845e66b881374c89f00"} Nov 24 01:36:36 crc kubenswrapper[4755]: I1124 01:36:36.822666 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nvn2q"] Nov 24 01:36:37 crc kubenswrapper[4755]: I1124 01:36:37.309233 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9mldg" event={"ID":"6461d6fb-cbe5-4035-9414-10bb3abe8bdd","Type":"ContainerStarted","Data":"5be629751bbb1d8205b485940cbe87536f5c3558d087bcaf097f8f8683818bff"} Nov 24 01:36:37 crc kubenswrapper[4755]: I1124 01:36:37.309979 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-nvn2q" podUID="2bbb66c4-2f51-4963-afc0-404379ad531c" containerName="registry-server" containerID="cri-o://8e8e38bef4c3fd0c2c69eb5f0743c9e8ecc1409db4e4b4e47636b5778a5fa88c" gracePeriod=2 Nov 24 01:36:37 crc kubenswrapper[4755]: I1124 01:36:37.331850 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-9mldg" podStartSLOduration=2.847198139 podStartE2EDuration="5.331829446s" podCreationTimestamp="2025-11-24 01:36:32 +0000 UTC" firstStartedPulling="2025-11-24 01:36:34.272499655 +0000 UTC m=+1418.958565156" lastFinishedPulling="2025-11-24 01:36:36.757130922 +0000 UTC m=+1421.443196463" observedRunningTime="2025-11-24 01:36:37.324206433 +0000 UTC m=+1422.010271934" watchObservedRunningTime="2025-11-24 01:36:37.331829446 +0000 UTC m=+1422.017894947" Nov 24 01:36:37 crc kubenswrapper[4755]: I1124 01:36:37.741962 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nvn2q" Nov 24 01:36:37 crc kubenswrapper[4755]: I1124 01:36:37.759740 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2bbb66c4-2f51-4963-afc0-404379ad531c-catalog-content\") pod \"2bbb66c4-2f51-4963-afc0-404379ad531c\" (UID: \"2bbb66c4-2f51-4963-afc0-404379ad531c\") " Nov 24 01:36:37 crc kubenswrapper[4755]: I1124 01:36:37.759905 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zl2p7\" (UniqueName: \"kubernetes.io/projected/2bbb66c4-2f51-4963-afc0-404379ad531c-kube-api-access-zl2p7\") pod \"2bbb66c4-2f51-4963-afc0-404379ad531c\" (UID: \"2bbb66c4-2f51-4963-afc0-404379ad531c\") " Nov 24 01:36:37 crc kubenswrapper[4755]: I1124 01:36:37.760028 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2bbb66c4-2f51-4963-afc0-404379ad531c-utilities\") pod \"2bbb66c4-2f51-4963-afc0-404379ad531c\" (UID: \"2bbb66c4-2f51-4963-afc0-404379ad531c\") " Nov 24 01:36:37 crc kubenswrapper[4755]: I1124 01:36:37.761169 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2bbb66c4-2f51-4963-afc0-404379ad531c-utilities" (OuterVolumeSpecName: "utilities") pod "2bbb66c4-2f51-4963-afc0-404379ad531c" (UID: "2bbb66c4-2f51-4963-afc0-404379ad531c"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:36:37 crc kubenswrapper[4755]: I1124 01:36:37.765802 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2bbb66c4-2f51-4963-afc0-404379ad531c-kube-api-access-zl2p7" (OuterVolumeSpecName: "kube-api-access-zl2p7") pod "2bbb66c4-2f51-4963-afc0-404379ad531c" (UID: "2bbb66c4-2f51-4963-afc0-404379ad531c"). InnerVolumeSpecName "kube-api-access-zl2p7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:36:37 crc kubenswrapper[4755]: I1124 01:36:37.779318 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2bbb66c4-2f51-4963-afc0-404379ad531c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2bbb66c4-2f51-4963-afc0-404379ad531c" (UID: "2bbb66c4-2f51-4963-afc0-404379ad531c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:36:37 crc kubenswrapper[4755]: I1124 01:36:37.862353 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2bbb66c4-2f51-4963-afc0-404379ad531c-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 01:36:37 crc kubenswrapper[4755]: I1124 01:36:37.862388 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2bbb66c4-2f51-4963-afc0-404379ad531c-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 01:36:37 crc kubenswrapper[4755]: I1124 01:36:37.862398 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zl2p7\" (UniqueName: \"kubernetes.io/projected/2bbb66c4-2f51-4963-afc0-404379ad531c-kube-api-access-zl2p7\") on node \"crc\" DevicePath \"\"" Nov 24 01:36:38 crc kubenswrapper[4755]: I1124 01:36:38.323556 4755 generic.go:334] "Generic (PLEG): container finished" podID="2bbb66c4-2f51-4963-afc0-404379ad531c" containerID="8e8e38bef4c3fd0c2c69eb5f0743c9e8ecc1409db4e4b4e47636b5778a5fa88c" exitCode=0 Nov 24 01:36:38 crc kubenswrapper[4755]: I1124 01:36:38.323614 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nvn2q" event={"ID":"2bbb66c4-2f51-4963-afc0-404379ad531c","Type":"ContainerDied","Data":"8e8e38bef4c3fd0c2c69eb5f0743c9e8ecc1409db4e4b4e47636b5778a5fa88c"} Nov 24 01:36:38 crc kubenswrapper[4755]: I1124 01:36:38.323656 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nvn2q" event={"ID":"2bbb66c4-2f51-4963-afc0-404379ad531c","Type":"ContainerDied","Data":"5d472a6b617dd3fb8cd0d7ac7bca80fe5338be740364dc1347ca2093405cab07"} Nov 24 01:36:38 crc kubenswrapper[4755]: I1124 01:36:38.323677 4755 scope.go:117] "RemoveContainer" containerID="8e8e38bef4c3fd0c2c69eb5f0743c9e8ecc1409db4e4b4e47636b5778a5fa88c" Nov 24 01:36:38 crc kubenswrapper[4755]: I1124 01:36:38.323702 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nvn2q" Nov 24 01:36:38 crc kubenswrapper[4755]: I1124 01:36:38.354221 4755 scope.go:117] "RemoveContainer" containerID="5718db8899064e055a86cf692ddc3553eb0ac26fa093b54c378178af4d4eb2c7" Nov 24 01:36:38 crc kubenswrapper[4755]: I1124 01:36:38.367835 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nvn2q"] Nov 24 01:36:38 crc kubenswrapper[4755]: I1124 01:36:38.377888 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-nvn2q"] Nov 24 01:36:38 crc kubenswrapper[4755]: I1124 01:36:38.389419 4755 scope.go:117] "RemoveContainer" containerID="d2c855cf79f258b6cdb2b41eda5013154e506c81894b9db375c03804c592460f" Nov 24 01:36:38 crc kubenswrapper[4755]: I1124 01:36:38.441729 4755 scope.go:117] "RemoveContainer" containerID="8e8e38bef4c3fd0c2c69eb5f0743c9e8ecc1409db4e4b4e47636b5778a5fa88c" Nov 24 01:36:38 crc kubenswrapper[4755]: E1124 01:36:38.442375 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e8e38bef4c3fd0c2c69eb5f0743c9e8ecc1409db4e4b4e47636b5778a5fa88c\": container with ID starting with 8e8e38bef4c3fd0c2c69eb5f0743c9e8ecc1409db4e4b4e47636b5778a5fa88c not found: ID does not exist" containerID="8e8e38bef4c3fd0c2c69eb5f0743c9e8ecc1409db4e4b4e47636b5778a5fa88c" Nov 24 01:36:38 crc kubenswrapper[4755]: I1124 01:36:38.442441 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e8e38bef4c3fd0c2c69eb5f0743c9e8ecc1409db4e4b4e47636b5778a5fa88c"} err="failed to get container status \"8e8e38bef4c3fd0c2c69eb5f0743c9e8ecc1409db4e4b4e47636b5778a5fa88c\": rpc error: code = NotFound desc = could not find container \"8e8e38bef4c3fd0c2c69eb5f0743c9e8ecc1409db4e4b4e47636b5778a5fa88c\": container with ID starting with 8e8e38bef4c3fd0c2c69eb5f0743c9e8ecc1409db4e4b4e47636b5778a5fa88c not found: ID does not exist" Nov 24 01:36:38 crc kubenswrapper[4755]: I1124 01:36:38.442704 4755 scope.go:117] "RemoveContainer" containerID="5718db8899064e055a86cf692ddc3553eb0ac26fa093b54c378178af4d4eb2c7" Nov 24 01:36:38 crc kubenswrapper[4755]: E1124 01:36:38.443091 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5718db8899064e055a86cf692ddc3553eb0ac26fa093b54c378178af4d4eb2c7\": container with ID starting with 5718db8899064e055a86cf692ddc3553eb0ac26fa093b54c378178af4d4eb2c7 not found: ID does not exist" containerID="5718db8899064e055a86cf692ddc3553eb0ac26fa093b54c378178af4d4eb2c7" Nov 24 01:36:38 crc kubenswrapper[4755]: I1124 01:36:38.443127 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5718db8899064e055a86cf692ddc3553eb0ac26fa093b54c378178af4d4eb2c7"} err="failed to get container status \"5718db8899064e055a86cf692ddc3553eb0ac26fa093b54c378178af4d4eb2c7\": rpc error: code = NotFound desc = could not find container \"5718db8899064e055a86cf692ddc3553eb0ac26fa093b54c378178af4d4eb2c7\": container with ID starting with 5718db8899064e055a86cf692ddc3553eb0ac26fa093b54c378178af4d4eb2c7 not found: ID does not exist" Nov 24 01:36:38 crc kubenswrapper[4755]: I1124 01:36:38.443152 4755 scope.go:117] "RemoveContainer" containerID="d2c855cf79f258b6cdb2b41eda5013154e506c81894b9db375c03804c592460f" Nov 24 01:36:38 crc kubenswrapper[4755]: E1124 01:36:38.443813 4755 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"d2c855cf79f258b6cdb2b41eda5013154e506c81894b9db375c03804c592460f\": container with ID starting with d2c855cf79f258b6cdb2b41eda5013154e506c81894b9db375c03804c592460f not found: ID does not exist" containerID="d2c855cf79f258b6cdb2b41eda5013154e506c81894b9db375c03804c592460f" Nov 24 01:36:38 crc kubenswrapper[4755]: I1124 01:36:38.443857 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d2c855cf79f258b6cdb2b41eda5013154e506c81894b9db375c03804c592460f"} err="failed to get container status \"d2c855cf79f258b6cdb2b41eda5013154e506c81894b9db375c03804c592460f\": rpc error: code = NotFound desc = could not find container \"d2c855cf79f258b6cdb2b41eda5013154e506c81894b9db375c03804c592460f\": container with ID starting with d2c855cf79f258b6cdb2b41eda5013154e506c81894b9db375c03804c592460f not found: ID does not exist" Nov 24 01:36:40 crc kubenswrapper[4755]: I1124 01:36:40.017838 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2bbb66c4-2f51-4963-afc0-404379ad531c" path="/var/lib/kubelet/pods/2bbb66c4-2f51-4963-afc0-404379ad531c/volumes" Nov 24 01:36:42 crc kubenswrapper[4755]: I1124 01:36:42.749016 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-9mldg" Nov 24 01:36:42 crc kubenswrapper[4755]: I1124 01:36:42.749372 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-9mldg" Nov 24 01:36:42 crc kubenswrapper[4755]: I1124 01:36:42.799007 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-9mldg" Nov 24 01:36:43 crc kubenswrapper[4755]: I1124 01:36:43.498086 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-9mldg" Nov 24 01:36:43 crc kubenswrapper[4755]: I1124 01:36:43.556291 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9mldg"] Nov 24 01:36:45 crc kubenswrapper[4755]: I1124 01:36:45.409197 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-9mldg" podUID="6461d6fb-cbe5-4035-9414-10bb3abe8bdd" containerName="registry-server" containerID="cri-o://5be629751bbb1d8205b485940cbe87536f5c3558d087bcaf097f8f8683818bff" gracePeriod=2 Nov 24 01:36:45 crc kubenswrapper[4755]: I1124 01:36:45.864458 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-9mldg" Nov 24 01:36:46 crc kubenswrapper[4755]: I1124 01:36:46.006150 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vkdpn\" (UniqueName: \"kubernetes.io/projected/6461d6fb-cbe5-4035-9414-10bb3abe8bdd-kube-api-access-vkdpn\") pod \"6461d6fb-cbe5-4035-9414-10bb3abe8bdd\" (UID: \"6461d6fb-cbe5-4035-9414-10bb3abe8bdd\") " Nov 24 01:36:46 crc kubenswrapper[4755]: I1124 01:36:46.006282 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6461d6fb-cbe5-4035-9414-10bb3abe8bdd-catalog-content\") pod \"6461d6fb-cbe5-4035-9414-10bb3abe8bdd\" (UID: \"6461d6fb-cbe5-4035-9414-10bb3abe8bdd\") " Nov 24 01:36:46 crc kubenswrapper[4755]: I1124 01:36:46.006469 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6461d6fb-cbe5-4035-9414-10bb3abe8bdd-utilities\") pod \"6461d6fb-cbe5-4035-9414-10bb3abe8bdd\" (UID: \"6461d6fb-cbe5-4035-9414-10bb3abe8bdd\") " Nov 24 01:36:46 crc kubenswrapper[4755]: I1124 01:36:46.007350 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6461d6fb-cbe5-4035-9414-10bb3abe8bdd-utilities" (OuterVolumeSpecName: "utilities") pod "6461d6fb-cbe5-4035-9414-10bb3abe8bdd" (UID: "6461d6fb-cbe5-4035-9414-10bb3abe8bdd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:36:46 crc kubenswrapper[4755]: I1124 01:36:46.007678 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6461d6fb-cbe5-4035-9414-10bb3abe8bdd-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 01:36:46 crc kubenswrapper[4755]: I1124 01:36:46.016833 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6461d6fb-cbe5-4035-9414-10bb3abe8bdd-kube-api-access-vkdpn" (OuterVolumeSpecName: "kube-api-access-vkdpn") pod "6461d6fb-cbe5-4035-9414-10bb3abe8bdd" (UID: "6461d6fb-cbe5-4035-9414-10bb3abe8bdd"). InnerVolumeSpecName "kube-api-access-vkdpn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:36:46 crc kubenswrapper[4755]: I1124 01:36:46.109521 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vkdpn\" (UniqueName: \"kubernetes.io/projected/6461d6fb-cbe5-4035-9414-10bb3abe8bdd-kube-api-access-vkdpn\") on node \"crc\" DevicePath \"\"" Nov 24 01:36:46 crc kubenswrapper[4755]: I1124 01:36:46.175994 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6461d6fb-cbe5-4035-9414-10bb3abe8bdd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6461d6fb-cbe5-4035-9414-10bb3abe8bdd" (UID: "6461d6fb-cbe5-4035-9414-10bb3abe8bdd"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:36:46 crc kubenswrapper[4755]: I1124 01:36:46.211194 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6461d6fb-cbe5-4035-9414-10bb3abe8bdd-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 01:36:46 crc kubenswrapper[4755]: I1124 01:36:46.425324 4755 generic.go:334] "Generic (PLEG): container finished" podID="6461d6fb-cbe5-4035-9414-10bb3abe8bdd" containerID="5be629751bbb1d8205b485940cbe87536f5c3558d087bcaf097f8f8683818bff" exitCode=0 Nov 24 01:36:46 crc kubenswrapper[4755]: I1124 01:36:46.425379 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9mldg" Nov 24 01:36:46 crc kubenswrapper[4755]: I1124 01:36:46.425402 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9mldg" event={"ID":"6461d6fb-cbe5-4035-9414-10bb3abe8bdd","Type":"ContainerDied","Data":"5be629751bbb1d8205b485940cbe87536f5c3558d087bcaf097f8f8683818bff"} Nov 24 01:36:46 crc kubenswrapper[4755]: I1124 01:36:46.425830 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9mldg" event={"ID":"6461d6fb-cbe5-4035-9414-10bb3abe8bdd","Type":"ContainerDied","Data":"654789da65c0252ddf05d55beb2f134518f9abd5beeb11eb085275e723427ddf"} Nov 24 01:36:46 crc kubenswrapper[4755]: I1124 01:36:46.425849 4755 scope.go:117] "RemoveContainer" containerID="5be629751bbb1d8205b485940cbe87536f5c3558d087bcaf097f8f8683818bff" Nov 24 01:36:46 crc kubenswrapper[4755]: I1124 01:36:46.459672 4755 scope.go:117] "RemoveContainer" containerID="0a15a6684da1029ccab43cb3385fe55b0ccdbbcdb07e7845e66b881374c89f00" Nov 24 01:36:46 crc kubenswrapper[4755]: I1124 01:36:46.465334 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9mldg"] Nov 24 01:36:46 crc kubenswrapper[4755]: I1124 01:36:46.478217 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-9mldg"] Nov 24 01:36:46 crc kubenswrapper[4755]: I1124 01:36:46.490528 4755 scope.go:117] "RemoveContainer" containerID="f317875a5983187672e48fb0fa191796575facd4af1c6731eb628a82a908ec34" Nov 24 01:36:46 crc kubenswrapper[4755]: I1124 01:36:46.522682 4755 scope.go:117] "RemoveContainer" containerID="5be629751bbb1d8205b485940cbe87536f5c3558d087bcaf097f8f8683818bff" Nov 24 01:36:46 crc kubenswrapper[4755]: E1124 01:36:46.522977 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5be629751bbb1d8205b485940cbe87536f5c3558d087bcaf097f8f8683818bff\": container with ID starting with 5be629751bbb1d8205b485940cbe87536f5c3558d087bcaf097f8f8683818bff not found: ID does not exist" containerID="5be629751bbb1d8205b485940cbe87536f5c3558d087bcaf097f8f8683818bff" Nov 24 01:36:46 crc kubenswrapper[4755]: I1124 01:36:46.523025 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5be629751bbb1d8205b485940cbe87536f5c3558d087bcaf097f8f8683818bff"} err="failed to get container status \"5be629751bbb1d8205b485940cbe87536f5c3558d087bcaf097f8f8683818bff\": rpc error: code = NotFound desc = could not find container \"5be629751bbb1d8205b485940cbe87536f5c3558d087bcaf097f8f8683818bff\": container with ID starting with 5be629751bbb1d8205b485940cbe87536f5c3558d087bcaf097f8f8683818bff not found: ID does not exist" Nov 24 
01:36:46 crc kubenswrapper[4755]: I1124 01:36:46.523064 4755 scope.go:117] "RemoveContainer" containerID="0a15a6684da1029ccab43cb3385fe55b0ccdbbcdb07e7845e66b881374c89f00" Nov 24 01:36:46 crc kubenswrapper[4755]: E1124 01:36:46.523378 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0a15a6684da1029ccab43cb3385fe55b0ccdbbcdb07e7845e66b881374c89f00\": container with ID starting with 0a15a6684da1029ccab43cb3385fe55b0ccdbbcdb07e7845e66b881374c89f00 not found: ID does not exist" containerID="0a15a6684da1029ccab43cb3385fe55b0ccdbbcdb07e7845e66b881374c89f00" Nov 24 01:36:46 crc kubenswrapper[4755]: I1124 01:36:46.523414 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0a15a6684da1029ccab43cb3385fe55b0ccdbbcdb07e7845e66b881374c89f00"} err="failed to get container status \"0a15a6684da1029ccab43cb3385fe55b0ccdbbcdb07e7845e66b881374c89f00\": rpc error: code = NotFound desc = could not find container \"0a15a6684da1029ccab43cb3385fe55b0ccdbbcdb07e7845e66b881374c89f00\": container with ID starting with 0a15a6684da1029ccab43cb3385fe55b0ccdbbcdb07e7845e66b881374c89f00 not found: ID does not exist" Nov 24 01:36:46 crc kubenswrapper[4755]: I1124 01:36:46.523435 4755 scope.go:117] "RemoveContainer" containerID="f317875a5983187672e48fb0fa191796575facd4af1c6731eb628a82a908ec34" Nov 24 01:36:46 crc kubenswrapper[4755]: E1124 01:36:46.523722 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f317875a5983187672e48fb0fa191796575facd4af1c6731eb628a82a908ec34\": container with ID starting with f317875a5983187672e48fb0fa191796575facd4af1c6731eb628a82a908ec34 not found: ID does not exist" containerID="f317875a5983187672e48fb0fa191796575facd4af1c6731eb628a82a908ec34" Nov 24 01:36:46 crc kubenswrapper[4755]: I1124 01:36:46.523762 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f317875a5983187672e48fb0fa191796575facd4af1c6731eb628a82a908ec34"} err="failed to get container status \"f317875a5983187672e48fb0fa191796575facd4af1c6731eb628a82a908ec34\": rpc error: code = NotFound desc = could not find container \"f317875a5983187672e48fb0fa191796575facd4af1c6731eb628a82a908ec34\": container with ID starting with f317875a5983187672e48fb0fa191796575facd4af1c6731eb628a82a908ec34 not found: ID does not exist" Nov 24 01:36:48 crc kubenswrapper[4755]: I1124 01:36:48.008527 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6461d6fb-cbe5-4035-9414-10bb3abe8bdd" path="/var/lib/kubelet/pods/6461d6fb-cbe5-4035-9414-10bb3abe8bdd/volumes" Nov 24 01:36:49 crc kubenswrapper[4755]: I1124 01:36:49.462567 4755 generic.go:334] "Generic (PLEG): container finished" podID="a1f11949-be37-4a9d-9e73-b0cbc20a6d1e" containerID="64657c7f5280a01e20900ba86c75810422421c5fb032f29f74cc15b42b50e1e2" exitCode=0 Nov 24 01:36:49 crc kubenswrapper[4755]: I1124 01:36:49.462733 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqnnd" event={"ID":"a1f11949-be37-4a9d-9e73-b0cbc20a6d1e","Type":"ContainerDied","Data":"64657c7f5280a01e20900ba86c75810422421c5fb032f29f74cc15b42b50e1e2"} Nov 24 01:36:50 crc kubenswrapper[4755]: I1124 01:36:50.866776 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqnnd" Nov 24 01:36:51 crc kubenswrapper[4755]: I1124 01:36:51.013759 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a1f11949-be37-4a9d-9e73-b0cbc20a6d1e-inventory\") pod \"a1f11949-be37-4a9d-9e73-b0cbc20a6d1e\" (UID: \"a1f11949-be37-4a9d-9e73-b0cbc20a6d1e\") " Nov 24 01:36:51 crc kubenswrapper[4755]: I1124 01:36:51.013979 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4dq7z\" (UniqueName: \"kubernetes.io/projected/a1f11949-be37-4a9d-9e73-b0cbc20a6d1e-kube-api-access-4dq7z\") pod \"a1f11949-be37-4a9d-9e73-b0cbc20a6d1e\" (UID: \"a1f11949-be37-4a9d-9e73-b0cbc20a6d1e\") " Nov 24 01:36:51 crc kubenswrapper[4755]: I1124 01:36:51.014075 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1f11949-be37-4a9d-9e73-b0cbc20a6d1e-bootstrap-combined-ca-bundle\") pod \"a1f11949-be37-4a9d-9e73-b0cbc20a6d1e\" (UID: \"a1f11949-be37-4a9d-9e73-b0cbc20a6d1e\") " Nov 24 01:36:51 crc kubenswrapper[4755]: I1124 01:36:51.014151 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a1f11949-be37-4a9d-9e73-b0cbc20a6d1e-ssh-key\") pod \"a1f11949-be37-4a9d-9e73-b0cbc20a6d1e\" (UID: \"a1f11949-be37-4a9d-9e73-b0cbc20a6d1e\") " Nov 24 01:36:51 crc kubenswrapper[4755]: I1124 01:36:51.019350 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1f11949-be37-4a9d-9e73-b0cbc20a6d1e-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "a1f11949-be37-4a9d-9e73-b0cbc20a6d1e" (UID: "a1f11949-be37-4a9d-9e73-b0cbc20a6d1e"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:36:51 crc kubenswrapper[4755]: I1124 01:36:51.019523 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1f11949-be37-4a9d-9e73-b0cbc20a6d1e-kube-api-access-4dq7z" (OuterVolumeSpecName: "kube-api-access-4dq7z") pod "a1f11949-be37-4a9d-9e73-b0cbc20a6d1e" (UID: "a1f11949-be37-4a9d-9e73-b0cbc20a6d1e"). InnerVolumeSpecName "kube-api-access-4dq7z". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:36:51 crc kubenswrapper[4755]: I1124 01:36:51.044429 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1f11949-be37-4a9d-9e73-b0cbc20a6d1e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a1f11949-be37-4a9d-9e73-b0cbc20a6d1e" (UID: "a1f11949-be37-4a9d-9e73-b0cbc20a6d1e"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:36:51 crc kubenswrapper[4755]: I1124 01:36:51.045778 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1f11949-be37-4a9d-9e73-b0cbc20a6d1e-inventory" (OuterVolumeSpecName: "inventory") pod "a1f11949-be37-4a9d-9e73-b0cbc20a6d1e" (UID: "a1f11949-be37-4a9d-9e73-b0cbc20a6d1e"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:36:51 crc kubenswrapper[4755]: I1124 01:36:51.117087 4755 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a1f11949-be37-4a9d-9e73-b0cbc20a6d1e-inventory\") on node \"crc\" DevicePath \"\"" Nov 24 01:36:51 crc kubenswrapper[4755]: I1124 01:36:51.117124 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4dq7z\" (UniqueName: \"kubernetes.io/projected/a1f11949-be37-4a9d-9e73-b0cbc20a6d1e-kube-api-access-4dq7z\") on node \"crc\" DevicePath \"\"" Nov 24 01:36:51 crc kubenswrapper[4755]: I1124 01:36:51.117139 4755 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1f11949-be37-4a9d-9e73-b0cbc20a6d1e-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:36:51 crc kubenswrapper[4755]: I1124 01:36:51.117150 4755 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a1f11949-be37-4a9d-9e73-b0cbc20a6d1e-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 24 01:36:51 crc kubenswrapper[4755]: I1124 01:36:51.486486 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqnnd" event={"ID":"a1f11949-be37-4a9d-9e73-b0cbc20a6d1e","Type":"ContainerDied","Data":"82eb0f672156115c4579e69187b88e8c459f1b5bc8f65fa92e54295c8a878380"} Nov 24 01:36:51 crc kubenswrapper[4755]: I1124 01:36:51.486543 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="82eb0f672156115c4579e69187b88e8c459f1b5bc8f65fa92e54295c8a878380" Nov 24 01:36:51 crc kubenswrapper[4755]: I1124 01:36:51.486543 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqnnd" Nov 24 01:36:51 crc kubenswrapper[4755]: I1124 01:36:51.572802 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-fc6r8"] Nov 24 01:36:51 crc kubenswrapper[4755]: E1124 01:36:51.573205 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6461d6fb-cbe5-4035-9414-10bb3abe8bdd" containerName="registry-server" Nov 24 01:36:51 crc kubenswrapper[4755]: I1124 01:36:51.573221 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="6461d6fb-cbe5-4035-9414-10bb3abe8bdd" containerName="registry-server" Nov 24 01:36:51 crc kubenswrapper[4755]: E1124 01:36:51.573240 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bbb66c4-2f51-4963-afc0-404379ad531c" containerName="registry-server" Nov 24 01:36:51 crc kubenswrapper[4755]: I1124 01:36:51.573247 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bbb66c4-2f51-4963-afc0-404379ad531c" containerName="registry-server" Nov 24 01:36:51 crc kubenswrapper[4755]: E1124 01:36:51.573261 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6461d6fb-cbe5-4035-9414-10bb3abe8bdd" containerName="extract-content" Nov 24 01:36:51 crc kubenswrapper[4755]: I1124 01:36:51.573267 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="6461d6fb-cbe5-4035-9414-10bb3abe8bdd" containerName="extract-content" Nov 24 01:36:51 crc kubenswrapper[4755]: E1124 01:36:51.573280 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bbb66c4-2f51-4963-afc0-404379ad531c" containerName="extract-utilities" Nov 24 01:36:51 crc kubenswrapper[4755]: I1124 01:36:51.573286 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bbb66c4-2f51-4963-afc0-404379ad531c" containerName="extract-utilities" Nov 24 01:36:51 crc kubenswrapper[4755]: E1124 01:36:51.573307 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6461d6fb-cbe5-4035-9414-10bb3abe8bdd" containerName="extract-utilities" Nov 24 01:36:51 crc kubenswrapper[4755]: I1124 01:36:51.573312 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="6461d6fb-cbe5-4035-9414-10bb3abe8bdd" containerName="extract-utilities" Nov 24 01:36:51 crc kubenswrapper[4755]: E1124 01:36:51.573329 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bbb66c4-2f51-4963-afc0-404379ad531c" containerName="extract-content" Nov 24 01:36:51 crc kubenswrapper[4755]: I1124 01:36:51.573335 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bbb66c4-2f51-4963-afc0-404379ad531c" containerName="extract-content" Nov 24 01:36:51 crc kubenswrapper[4755]: E1124 01:36:51.573344 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1f11949-be37-4a9d-9e73-b0cbc20a6d1e" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Nov 24 01:36:51 crc kubenswrapper[4755]: I1124 01:36:51.573352 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1f11949-be37-4a9d-9e73-b0cbc20a6d1e" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Nov 24 01:36:51 crc kubenswrapper[4755]: I1124 01:36:51.573528 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="2bbb66c4-2f51-4963-afc0-404379ad531c" containerName="registry-server" Nov 24 01:36:51 crc kubenswrapper[4755]: I1124 01:36:51.573551 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="6461d6fb-cbe5-4035-9414-10bb3abe8bdd" containerName="registry-server" Nov 24 01:36:51 crc 
kubenswrapper[4755]: I1124 01:36:51.573574 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1f11949-be37-4a9d-9e73-b0cbc20a6d1e" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Nov 24 01:36:51 crc kubenswrapper[4755]: I1124 01:36:51.574305 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-fc6r8" Nov 24 01:36:51 crc kubenswrapper[4755]: I1124 01:36:51.576132 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-4dz5v" Nov 24 01:36:51 crc kubenswrapper[4755]: I1124 01:36:51.577417 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Nov 24 01:36:51 crc kubenswrapper[4755]: I1124 01:36:51.577786 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Nov 24 01:36:51 crc kubenswrapper[4755]: I1124 01:36:51.577963 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 24 01:36:51 crc kubenswrapper[4755]: I1124 01:36:51.582790 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-fc6r8"] Nov 24 01:36:51 crc kubenswrapper[4755]: I1124 01:36:51.625940 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c0341d77-5182-4cb4-b4f8-4b3389c7887b-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-fc6r8\" (UID: \"c0341d77-5182-4cb4-b4f8-4b3389c7887b\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-fc6r8" Nov 24 01:36:51 crc kubenswrapper[4755]: I1124 01:36:51.626026 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c0341d77-5182-4cb4-b4f8-4b3389c7887b-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-fc6r8\" (UID: \"c0341d77-5182-4cb4-b4f8-4b3389c7887b\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-fc6r8" Nov 24 01:36:51 crc kubenswrapper[4755]: I1124 01:36:51.626052 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4v8lg\" (UniqueName: \"kubernetes.io/projected/c0341d77-5182-4cb4-b4f8-4b3389c7887b-kube-api-access-4v8lg\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-fc6r8\" (UID: \"c0341d77-5182-4cb4-b4f8-4b3389c7887b\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-fc6r8" Nov 24 01:36:51 crc kubenswrapper[4755]: I1124 01:36:51.727653 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c0341d77-5182-4cb4-b4f8-4b3389c7887b-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-fc6r8\" (UID: \"c0341d77-5182-4cb4-b4f8-4b3389c7887b\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-fc6r8" Nov 24 01:36:51 crc kubenswrapper[4755]: I1124 01:36:51.727709 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4v8lg\" (UniqueName: \"kubernetes.io/projected/c0341d77-5182-4cb4-b4f8-4b3389c7887b-kube-api-access-4v8lg\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-fc6r8\" (UID: \"c0341d77-5182-4cb4-b4f8-4b3389c7887b\") " 
pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-fc6r8" Nov 24 01:36:51 crc kubenswrapper[4755]: I1124 01:36:51.727840 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c0341d77-5182-4cb4-b4f8-4b3389c7887b-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-fc6r8\" (UID: \"c0341d77-5182-4cb4-b4f8-4b3389c7887b\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-fc6r8" Nov 24 01:36:51 crc kubenswrapper[4755]: I1124 01:36:51.734755 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c0341d77-5182-4cb4-b4f8-4b3389c7887b-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-fc6r8\" (UID: \"c0341d77-5182-4cb4-b4f8-4b3389c7887b\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-fc6r8" Nov 24 01:36:51 crc kubenswrapper[4755]: I1124 01:36:51.739798 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c0341d77-5182-4cb4-b4f8-4b3389c7887b-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-fc6r8\" (UID: \"c0341d77-5182-4cb4-b4f8-4b3389c7887b\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-fc6r8" Nov 24 01:36:51 crc kubenswrapper[4755]: I1124 01:36:51.743536 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4v8lg\" (UniqueName: \"kubernetes.io/projected/c0341d77-5182-4cb4-b4f8-4b3389c7887b-kube-api-access-4v8lg\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-fc6r8\" (UID: \"c0341d77-5182-4cb4-b4f8-4b3389c7887b\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-fc6r8" Nov 24 01:36:51 crc kubenswrapper[4755]: I1124 01:36:51.901239 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-fc6r8" Nov 24 01:36:52 crc kubenswrapper[4755]: I1124 01:36:52.439064 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-fc6r8"] Nov 24 01:36:52 crc kubenswrapper[4755]: W1124 01:36:52.441843 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc0341d77_5182_4cb4_b4f8_4b3389c7887b.slice/crio-4df7bbd33d2e155d7bbd75962d0a086be072fc349ba4cfecd49d32b2f31a01f0 WatchSource:0}: Error finding container 4df7bbd33d2e155d7bbd75962d0a086be072fc349ba4cfecd49d32b2f31a01f0: Status 404 returned error can't find the container with id 4df7bbd33d2e155d7bbd75962d0a086be072fc349ba4cfecd49d32b2f31a01f0 Nov 24 01:36:52 crc kubenswrapper[4755]: I1124 01:36:52.495131 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-fc6r8" event={"ID":"c0341d77-5182-4cb4-b4f8-4b3389c7887b","Type":"ContainerStarted","Data":"4df7bbd33d2e155d7bbd75962d0a086be072fc349ba4cfecd49d32b2f31a01f0"} Nov 24 01:36:53 crc kubenswrapper[4755]: I1124 01:36:53.507701 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-fc6r8" event={"ID":"c0341d77-5182-4cb4-b4f8-4b3389c7887b","Type":"ContainerStarted","Data":"1274b653c87572299252f31999b9f0474897fb8220bf2be5e4038f01e6dd0409"} Nov 24 01:36:53 crc kubenswrapper[4755]: I1124 01:36:53.534016 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-fc6r8" podStartSLOduration=2.092543786 podStartE2EDuration="2.533994911s" podCreationTimestamp="2025-11-24 01:36:51 +0000 UTC" firstStartedPulling="2025-11-24 01:36:52.444002195 +0000 UTC m=+1437.130067696" lastFinishedPulling="2025-11-24 01:36:52.88545332 +0000 UTC m=+1437.571518821" observedRunningTime="2025-11-24 01:36:53.526578683 +0000 UTC m=+1438.212644244" watchObservedRunningTime="2025-11-24 01:36:53.533994911 +0000 UTC m=+1438.220060432" Nov 24 01:37:33 crc kubenswrapper[4755]: I1124 01:37:33.295496 4755 patch_prober.go:28] interesting pod/machine-config-daemon-h8xzm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 01:37:33 crc kubenswrapper[4755]: I1124 01:37:33.296104 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 01:37:53 crc kubenswrapper[4755]: I1124 01:37:53.051762 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-4wps5"] Nov 24 01:37:53 crc kubenswrapper[4755]: I1124 01:37:53.059394 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-lp9ng"] Nov 24 01:37:53 crc kubenswrapper[4755]: I1124 01:37:53.067259 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-lp9ng"] Nov 24 01:37:53 crc kubenswrapper[4755]: I1124 01:37:53.076576 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/keystone-5521-account-create-8qtxh"] Nov 24 01:37:53 crc kubenswrapper[4755]: I1124 01:37:53.085559 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-4wps5"] Nov 24 01:37:53 crc kubenswrapper[4755]: I1124 01:37:53.092200 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-e4bc-account-create-hcblx"] Nov 24 01:37:53 crc kubenswrapper[4755]: I1124 01:37:53.099984 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-p6ns4"] Nov 24 01:37:53 crc kubenswrapper[4755]: I1124 01:37:53.107215 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-e4bc-account-create-hcblx"] Nov 24 01:37:53 crc kubenswrapper[4755]: I1124 01:37:53.115820 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-5521-account-create-8qtxh"] Nov 24 01:37:53 crc kubenswrapper[4755]: I1124 01:37:53.131946 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-p6ns4"] Nov 24 01:37:53 crc kubenswrapper[4755]: I1124 01:37:53.143482 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-0ff2-account-create-vpglm"] Nov 24 01:37:53 crc kubenswrapper[4755]: I1124 01:37:53.150822 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-0ff2-account-create-vpglm"] Nov 24 01:37:54 crc kubenswrapper[4755]: I1124 01:37:54.008013 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09e7a2d3-efe2-4e9c-b485-937b4b4a2d38" path="/var/lib/kubelet/pods/09e7a2d3-efe2-4e9c-b485-937b4b4a2d38/volumes" Nov 24 01:37:54 crc kubenswrapper[4755]: I1124 01:37:54.009167 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a8c7a3e-d7af-4cdb-966b-d38e19315024" path="/var/lib/kubelet/pods/1a8c7a3e-d7af-4cdb-966b-d38e19315024/volumes" Nov 24 01:37:54 crc kubenswrapper[4755]: I1124 01:37:54.009954 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2387b3cd-ba87-4b14-b866-3a065b3d451c" path="/var/lib/kubelet/pods/2387b3cd-ba87-4b14-b866-3a065b3d451c/volumes" Nov 24 01:37:54 crc kubenswrapper[4755]: I1124 01:37:54.010680 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="550a75dc-d88c-47f3-8c08-2803fb661736" path="/var/lib/kubelet/pods/550a75dc-d88c-47f3-8c08-2803fb661736/volumes" Nov 24 01:37:54 crc kubenswrapper[4755]: I1124 01:37:54.012057 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9ee50ae2-bf98-4c78-a071-7dd4d45f58dd" path="/var/lib/kubelet/pods/9ee50ae2-bf98-4c78-a071-7dd4d45f58dd/volumes" Nov 24 01:37:54 crc kubenswrapper[4755]: I1124 01:37:54.012802 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab03774c-aec3-4384-953c-4c8cff0ab899" path="/var/lib/kubelet/pods/ab03774c-aec3-4384-953c-4c8cff0ab899/volumes" Nov 24 01:38:02 crc kubenswrapper[4755]: I1124 01:38:02.027564 4755 scope.go:117] "RemoveContainer" containerID="b0d3f78d72722d216025e67130fce70acfb1909b78af2d8215065933314ab20e" Nov 24 01:38:02 crc kubenswrapper[4755]: I1124 01:38:02.058855 4755 scope.go:117] "RemoveContainer" containerID="0f376ddc4ce8638d6e40affd73239870fb3b461a106843874a1c8b82b38190e4" Nov 24 01:38:02 crc kubenswrapper[4755]: I1124 01:38:02.157720 4755 scope.go:117] "RemoveContainer" containerID="25a647325b5b029340ccde9e0d0b6dad7afb12808db0917d0f59bacc180be88d" Nov 24 01:38:02 crc kubenswrapper[4755]: I1124 01:38:02.183490 4755 scope.go:117] "RemoveContainer" 
containerID="e1bc3fe47ad11c1a2caba8e59e6511b303318dc164b55c83ec7427dc08afd17f" Nov 24 01:38:02 crc kubenswrapper[4755]: I1124 01:38:02.207895 4755 scope.go:117] "RemoveContainer" containerID="151bf023041b498e9d302793d2829971b1c2a46c8c9f824f98fb85a964f05d97" Nov 24 01:38:02 crc kubenswrapper[4755]: I1124 01:38:02.254719 4755 scope.go:117] "RemoveContainer" containerID="7263e34f93e200f35dd09fe136ab328ef993a1d2138b94e18518097dc4c2b4cd" Nov 24 01:38:02 crc kubenswrapper[4755]: I1124 01:38:02.298255 4755 scope.go:117] "RemoveContainer" containerID="343a538df85b38310a1480db695d7a86df4622fc9921d0c16eaa6a0fd0ffa0e6" Nov 24 01:38:02 crc kubenswrapper[4755]: I1124 01:38:02.334869 4755 scope.go:117] "RemoveContainer" containerID="af9a2fd5ed610ba25c55ec8f7243fe90e22f72f84e9edee48aabaa44099851aa" Nov 24 01:38:02 crc kubenswrapper[4755]: I1124 01:38:02.355730 4755 scope.go:117] "RemoveContainer" containerID="f5426c3a0db25be9cb06bf0eb32faa1b944d2aaf6c5d66100ce614ac9d02389a" Nov 24 01:38:02 crc kubenswrapper[4755]: I1124 01:38:02.378925 4755 scope.go:117] "RemoveContainer" containerID="da83f87d6d1491e1c07fdd2e4b7ee45a655f446cfe00a49d772c5b3b1a306c59" Nov 24 01:38:03 crc kubenswrapper[4755]: I1124 01:38:03.295881 4755 patch_prober.go:28] interesting pod/machine-config-daemon-h8xzm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 01:38:03 crc kubenswrapper[4755]: I1124 01:38:03.296241 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 01:38:19 crc kubenswrapper[4755]: I1124 01:38:19.046826 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-876a-account-create-45826"] Nov 24 01:38:19 crc kubenswrapper[4755]: I1124 01:38:19.056512 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-k42zb"] Nov 24 01:38:19 crc kubenswrapper[4755]: I1124 01:38:19.067101 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-k42zb"] Nov 24 01:38:19 crc kubenswrapper[4755]: I1124 01:38:19.075500 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-876a-account-create-45826"] Nov 24 01:38:20 crc kubenswrapper[4755]: I1124 01:38:20.019688 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="281e34b7-f570-479f-ac9d-0e2b98d3f24f" path="/var/lib/kubelet/pods/281e34b7-f570-479f-ac9d-0e2b98d3f24f/volumes" Nov 24 01:38:20 crc kubenswrapper[4755]: I1124 01:38:20.027752 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d8b666dc-55f4-4e9f-9768-147815005e1e" path="/var/lib/kubelet/pods/d8b666dc-55f4-4e9f-9768-147815005e1e/volumes" Nov 24 01:38:20 crc kubenswrapper[4755]: I1124 01:38:20.033170 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-5283-account-create-fhrq4"] Nov 24 01:38:20 crc kubenswrapper[4755]: I1124 01:38:20.045881 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-a00b-account-create-dmk77"] Nov 24 01:38:20 crc kubenswrapper[4755]: I1124 01:38:20.058808 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-w9lth"] 
Nov 24 01:38:20 crc kubenswrapper[4755]: I1124 01:38:20.068986 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-cvwzv"] Nov 24 01:38:20 crc kubenswrapper[4755]: I1124 01:38:20.078859 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-a00b-account-create-dmk77"] Nov 24 01:38:20 crc kubenswrapper[4755]: I1124 01:38:20.085921 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-5283-account-create-fhrq4"] Nov 24 01:38:20 crc kubenswrapper[4755]: I1124 01:38:20.094222 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-w9lth"] Nov 24 01:38:20 crc kubenswrapper[4755]: I1124 01:38:20.101034 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-cvwzv"] Nov 24 01:38:22 crc kubenswrapper[4755]: I1124 01:38:22.013299 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="866eeb81-622c-4ea2-a727-184e5e7d745c" path="/var/lib/kubelet/pods/866eeb81-622c-4ea2-a727-184e5e7d745c/volumes" Nov 24 01:38:22 crc kubenswrapper[4755]: I1124 01:38:22.014418 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a1b6e064-c1f0-4efd-9a54-5d1f7a4708b6" path="/var/lib/kubelet/pods/a1b6e064-c1f0-4efd-9a54-5d1f7a4708b6/volumes" Nov 24 01:38:22 crc kubenswrapper[4755]: I1124 01:38:22.015261 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c3ad27b1-9595-4f03-b472-2dfdf7e70cc8" path="/var/lib/kubelet/pods/c3ad27b1-9595-4f03-b472-2dfdf7e70cc8/volumes" Nov 24 01:38:22 crc kubenswrapper[4755]: I1124 01:38:22.015980 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c85f6518-6046-441c-afae-4bd797cb807f" path="/var/lib/kubelet/pods/c85f6518-6046-441c-afae-4bd797cb807f/volumes" Nov 24 01:38:22 crc kubenswrapper[4755]: I1124 01:38:22.039847 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-vtf8f"] Nov 24 01:38:22 crc kubenswrapper[4755]: I1124 01:38:22.049825 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-vtf8f"] Nov 24 01:38:22 crc kubenswrapper[4755]: I1124 01:38:22.431509 4755 generic.go:334] "Generic (PLEG): container finished" podID="c0341d77-5182-4cb4-b4f8-4b3389c7887b" containerID="1274b653c87572299252f31999b9f0474897fb8220bf2be5e4038f01e6dd0409" exitCode=0 Nov 24 01:38:22 crc kubenswrapper[4755]: I1124 01:38:22.431592 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-fc6r8" event={"ID":"c0341d77-5182-4cb4-b4f8-4b3389c7887b","Type":"ContainerDied","Data":"1274b653c87572299252f31999b9f0474897fb8220bf2be5e4038f01e6dd0409"} Nov 24 01:38:23 crc kubenswrapper[4755]: I1124 01:38:23.812799 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-fc6r8" Nov 24 01:38:23 crc kubenswrapper[4755]: I1124 01:38:23.913787 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c0341d77-5182-4cb4-b4f8-4b3389c7887b-inventory\") pod \"c0341d77-5182-4cb4-b4f8-4b3389c7887b\" (UID: \"c0341d77-5182-4cb4-b4f8-4b3389c7887b\") " Nov 24 01:38:23 crc kubenswrapper[4755]: I1124 01:38:23.913862 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4v8lg\" (UniqueName: \"kubernetes.io/projected/c0341d77-5182-4cb4-b4f8-4b3389c7887b-kube-api-access-4v8lg\") pod \"c0341d77-5182-4cb4-b4f8-4b3389c7887b\" (UID: \"c0341d77-5182-4cb4-b4f8-4b3389c7887b\") " Nov 24 01:38:23 crc kubenswrapper[4755]: I1124 01:38:23.913911 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c0341d77-5182-4cb4-b4f8-4b3389c7887b-ssh-key\") pod \"c0341d77-5182-4cb4-b4f8-4b3389c7887b\" (UID: \"c0341d77-5182-4cb4-b4f8-4b3389c7887b\") " Nov 24 01:38:23 crc kubenswrapper[4755]: I1124 01:38:23.919518 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0341d77-5182-4cb4-b4f8-4b3389c7887b-kube-api-access-4v8lg" (OuterVolumeSpecName: "kube-api-access-4v8lg") pod "c0341d77-5182-4cb4-b4f8-4b3389c7887b" (UID: "c0341d77-5182-4cb4-b4f8-4b3389c7887b"). InnerVolumeSpecName "kube-api-access-4v8lg". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:38:23 crc kubenswrapper[4755]: I1124 01:38:23.939923 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0341d77-5182-4cb4-b4f8-4b3389c7887b-inventory" (OuterVolumeSpecName: "inventory") pod "c0341d77-5182-4cb4-b4f8-4b3389c7887b" (UID: "c0341d77-5182-4cb4-b4f8-4b3389c7887b"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:38:23 crc kubenswrapper[4755]: I1124 01:38:23.947576 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0341d77-5182-4cb4-b4f8-4b3389c7887b-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c0341d77-5182-4cb4-b4f8-4b3389c7887b" (UID: "c0341d77-5182-4cb4-b4f8-4b3389c7887b"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:38:24 crc kubenswrapper[4755]: I1124 01:38:24.011133 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6dee4f3a-cb77-4137-b459-9c1be1a005ef" path="/var/lib/kubelet/pods/6dee4f3a-cb77-4137-b459-9c1be1a005ef/volumes" Nov 24 01:38:24 crc kubenswrapper[4755]: I1124 01:38:24.015996 4755 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c0341d77-5182-4cb4-b4f8-4b3389c7887b-inventory\") on node \"crc\" DevicePath \"\"" Nov 24 01:38:24 crc kubenswrapper[4755]: I1124 01:38:24.016041 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4v8lg\" (UniqueName: \"kubernetes.io/projected/c0341d77-5182-4cb4-b4f8-4b3389c7887b-kube-api-access-4v8lg\") on node \"crc\" DevicePath \"\"" Nov 24 01:38:24 crc kubenswrapper[4755]: I1124 01:38:24.016078 4755 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c0341d77-5182-4cb4-b4f8-4b3389c7887b-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 24 01:38:24 crc kubenswrapper[4755]: I1124 01:38:24.451867 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-fc6r8" event={"ID":"c0341d77-5182-4cb4-b4f8-4b3389c7887b","Type":"ContainerDied","Data":"4df7bbd33d2e155d7bbd75962d0a086be072fc349ba4cfecd49d32b2f31a01f0"} Nov 24 01:38:24 crc kubenswrapper[4755]: I1124 01:38:24.452304 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4df7bbd33d2e155d7bbd75962d0a086be072fc349ba4cfecd49d32b2f31a01f0" Nov 24 01:38:24 crc kubenswrapper[4755]: I1124 01:38:24.451923 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-fc6r8" Nov 24 01:38:24 crc kubenswrapper[4755]: I1124 01:38:24.537233 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-czxxj"] Nov 24 01:38:24 crc kubenswrapper[4755]: E1124 01:38:24.537735 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0341d77-5182-4cb4-b4f8-4b3389c7887b" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Nov 24 01:38:24 crc kubenswrapper[4755]: I1124 01:38:24.537760 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0341d77-5182-4cb4-b4f8-4b3389c7887b" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Nov 24 01:38:24 crc kubenswrapper[4755]: I1124 01:38:24.537981 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0341d77-5182-4cb4-b4f8-4b3389c7887b" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Nov 24 01:38:24 crc kubenswrapper[4755]: I1124 01:38:24.538733 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-czxxj" Nov 24 01:38:24 crc kubenswrapper[4755]: I1124 01:38:24.540765 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-4dz5v" Nov 24 01:38:24 crc kubenswrapper[4755]: I1124 01:38:24.542627 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Nov 24 01:38:24 crc kubenswrapper[4755]: I1124 01:38:24.542713 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Nov 24 01:38:24 crc kubenswrapper[4755]: I1124 01:38:24.547089 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 24 01:38:24 crc kubenswrapper[4755]: I1124 01:38:24.557212 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-czxxj"] Nov 24 01:38:24 crc kubenswrapper[4755]: I1124 01:38:24.626181 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vcbtz\" (UniqueName: \"kubernetes.io/projected/f823da2a-bba1-4b6e-9504-e03ec6a3b94f-kube-api-access-vcbtz\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-czxxj\" (UID: \"f823da2a-bba1-4b6e-9504-e03ec6a3b94f\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-czxxj" Nov 24 01:38:24 crc kubenswrapper[4755]: I1124 01:38:24.626588 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f823da2a-bba1-4b6e-9504-e03ec6a3b94f-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-czxxj\" (UID: \"f823da2a-bba1-4b6e-9504-e03ec6a3b94f\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-czxxj" Nov 24 01:38:24 crc kubenswrapper[4755]: I1124 01:38:24.627105 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f823da2a-bba1-4b6e-9504-e03ec6a3b94f-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-czxxj\" (UID: \"f823da2a-bba1-4b6e-9504-e03ec6a3b94f\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-czxxj" Nov 24 01:38:24 crc kubenswrapper[4755]: I1124 01:38:24.728724 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f823da2a-bba1-4b6e-9504-e03ec6a3b94f-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-czxxj\" (UID: \"f823da2a-bba1-4b6e-9504-e03ec6a3b94f\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-czxxj" Nov 24 01:38:24 crc kubenswrapper[4755]: I1124 01:38:24.728906 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f823da2a-bba1-4b6e-9504-e03ec6a3b94f-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-czxxj\" (UID: \"f823da2a-bba1-4b6e-9504-e03ec6a3b94f\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-czxxj" Nov 24 01:38:24 crc kubenswrapper[4755]: I1124 01:38:24.728973 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vcbtz\" (UniqueName: \"kubernetes.io/projected/f823da2a-bba1-4b6e-9504-e03ec6a3b94f-kube-api-access-vcbtz\") 
pod \"configure-network-edpm-deployment-openstack-edpm-ipam-czxxj\" (UID: \"f823da2a-bba1-4b6e-9504-e03ec6a3b94f\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-czxxj" Nov 24 01:38:24 crc kubenswrapper[4755]: I1124 01:38:24.735184 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f823da2a-bba1-4b6e-9504-e03ec6a3b94f-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-czxxj\" (UID: \"f823da2a-bba1-4b6e-9504-e03ec6a3b94f\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-czxxj" Nov 24 01:38:24 crc kubenswrapper[4755]: I1124 01:38:24.735434 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f823da2a-bba1-4b6e-9504-e03ec6a3b94f-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-czxxj\" (UID: \"f823da2a-bba1-4b6e-9504-e03ec6a3b94f\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-czxxj" Nov 24 01:38:24 crc kubenswrapper[4755]: I1124 01:38:24.744904 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vcbtz\" (UniqueName: \"kubernetes.io/projected/f823da2a-bba1-4b6e-9504-e03ec6a3b94f-kube-api-access-vcbtz\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-czxxj\" (UID: \"f823da2a-bba1-4b6e-9504-e03ec6a3b94f\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-czxxj" Nov 24 01:38:24 crc kubenswrapper[4755]: I1124 01:38:24.866788 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-czxxj" Nov 24 01:38:25 crc kubenswrapper[4755]: I1124 01:38:25.415429 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-czxxj"] Nov 24 01:38:25 crc kubenswrapper[4755]: I1124 01:38:25.426533 4755 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 24 01:38:25 crc kubenswrapper[4755]: I1124 01:38:25.462684 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-czxxj" event={"ID":"f823da2a-bba1-4b6e-9504-e03ec6a3b94f","Type":"ContainerStarted","Data":"4d66f8e4d570e5055b71d0cf63da894b3d6bb72a6ab0f9ffe90c54c8980d3014"} Nov 24 01:38:26 crc kubenswrapper[4755]: I1124 01:38:26.485144 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-czxxj" event={"ID":"f823da2a-bba1-4b6e-9504-e03ec6a3b94f","Type":"ContainerStarted","Data":"4e3a0ea280d9ad86e49a7eaac16b8eaa1e457cbe143ef649fe212d3208b39da7"} Nov 24 01:38:26 crc kubenswrapper[4755]: I1124 01:38:26.505930 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-czxxj" podStartSLOduration=1.963515679 podStartE2EDuration="2.505910472s" podCreationTimestamp="2025-11-24 01:38:24 +0000 UTC" firstStartedPulling="2025-11-24 01:38:25.426258059 +0000 UTC m=+1530.112323560" lastFinishedPulling="2025-11-24 01:38:25.968652842 +0000 UTC m=+1530.654718353" observedRunningTime="2025-11-24 01:38:26.503146245 +0000 UTC m=+1531.189211766" watchObservedRunningTime="2025-11-24 01:38:26.505910472 +0000 UTC m=+1531.191975973" Nov 24 01:38:27 crc kubenswrapper[4755]: I1124 01:38:27.032499 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/keystone-db-sync-fhhlw"] Nov 24 01:38:27 crc kubenswrapper[4755]: I1124 01:38:27.044369 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-fhhlw"] Nov 24 01:38:28 crc kubenswrapper[4755]: I1124 01:38:28.009214 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba05a62d-df27-4947-8b1a-bf6410f576a9" path="/var/lib/kubelet/pods/ba05a62d-df27-4947-8b1a-bf6410f576a9/volumes" Nov 24 01:38:33 crc kubenswrapper[4755]: I1124 01:38:33.295936 4755 patch_prober.go:28] interesting pod/machine-config-daemon-h8xzm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 01:38:33 crc kubenswrapper[4755]: I1124 01:38:33.296597 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 01:38:33 crc kubenswrapper[4755]: I1124 01:38:33.296689 4755 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" Nov 24 01:38:33 crc kubenswrapper[4755]: I1124 01:38:33.297556 4755 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8c75477967bfd17f465845e9ad9d8e30c5874427ba92c3a82e0868aae5f07ae3"} pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 24 01:38:33 crc kubenswrapper[4755]: I1124 01:38:33.297678 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" containerID="cri-o://8c75477967bfd17f465845e9ad9d8e30c5874427ba92c3a82e0868aae5f07ae3" gracePeriod=600 Nov 24 01:38:33 crc kubenswrapper[4755]: E1124 01:38:33.417691 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:38:33 crc kubenswrapper[4755]: I1124 01:38:33.550245 4755 generic.go:334] "Generic (PLEG): container finished" podID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerID="8c75477967bfd17f465845e9ad9d8e30c5874427ba92c3a82e0868aae5f07ae3" exitCode=0 Nov 24 01:38:33 crc kubenswrapper[4755]: I1124 01:38:33.550289 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" event={"ID":"b1962128-02a0-46c3-82c2-5055c2aed0b9","Type":"ContainerDied","Data":"8c75477967bfd17f465845e9ad9d8e30c5874427ba92c3a82e0868aae5f07ae3"} Nov 24 01:38:33 crc kubenswrapper[4755]: I1124 01:38:33.550321 4755 scope.go:117] "RemoveContainer" containerID="6046690ccd6cfb15aff12bbb82767c5a83c9c84c292122438b2c2722b65a5466" Nov 24 01:38:33 crc kubenswrapper[4755]: I1124 
01:38:33.551119 4755 scope.go:117] "RemoveContainer" containerID="8c75477967bfd17f465845e9ad9d8e30c5874427ba92c3a82e0868aae5f07ae3" Nov 24 01:38:33 crc kubenswrapper[4755]: E1124 01:38:33.551402 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:38:46 crc kubenswrapper[4755]: I1124 01:38:46.004130 4755 scope.go:117] "RemoveContainer" containerID="8c75477967bfd17f465845e9ad9d8e30c5874427ba92c3a82e0868aae5f07ae3" Nov 24 01:38:46 crc kubenswrapper[4755]: E1124 01:38:46.004977 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:38:57 crc kubenswrapper[4755]: I1124 01:38:57.997693 4755 scope.go:117] "RemoveContainer" containerID="8c75477967bfd17f465845e9ad9d8e30c5874427ba92c3a82e0868aae5f07ae3" Nov 24 01:38:57 crc kubenswrapper[4755]: E1124 01:38:57.998395 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:38:58 crc kubenswrapper[4755]: I1124 01:38:58.054567 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-65g8w"] Nov 24 01:38:58 crc kubenswrapper[4755]: I1124 01:38:58.069751 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-65g8w"] Nov 24 01:39:00 crc kubenswrapper[4755]: I1124 01:39:00.008117 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f90a48fa-6911-4df9-a1e8-d64ba7547daf" path="/var/lib/kubelet/pods/f90a48fa-6911-4df9-a1e8-d64ba7547daf/volumes" Nov 24 01:39:02 crc kubenswrapper[4755]: I1124 01:39:02.556982 4755 scope.go:117] "RemoveContainer" containerID="70869526a2f3d6dcdf3530833b5417a288777956a15dc904555b139da5d304a9" Nov 24 01:39:02 crc kubenswrapper[4755]: I1124 01:39:02.597921 4755 scope.go:117] "RemoveContainer" containerID="95f68ef420aa4049198a419a3c649b87da4bbd4e0086d4b8fee3caae07daefdc" Nov 24 01:39:02 crc kubenswrapper[4755]: I1124 01:39:02.625616 4755 scope.go:117] "RemoveContainer" containerID="448928381bf34cd7e310c0d907ab37d7068898962c209f6742fa685befbcf1f8" Nov 24 01:39:02 crc kubenswrapper[4755]: I1124 01:39:02.698861 4755 scope.go:117] "RemoveContainer" containerID="f59b4c5cd502b8565f2159fdb02163c0d0fd6805f2b214da42cfaa7873855440" Nov 24 01:39:02 crc kubenswrapper[4755]: I1124 01:39:02.751133 4755 scope.go:117] "RemoveContainer" containerID="7f947942acf76534330daf32a36284acadfad746d2fb9a8bc8bb97f23bdb8da5" Nov 24 01:39:02 crc kubenswrapper[4755]: I1124 01:39:02.779324 4755 scope.go:117] "RemoveContainer" 
containerID="631b6c9e5863be7e12b9fa348a729d94f57a54ba7c343b25bcaadf1839ac6f8c" Nov 24 01:39:02 crc kubenswrapper[4755]: I1124 01:39:02.828651 4755 scope.go:117] "RemoveContainer" containerID="03fd48e4131bf585cbc0f7d53823d3623f20dd4ee33ec85fde122a7c9ebaed70" Nov 24 01:39:02 crc kubenswrapper[4755]: I1124 01:39:02.849017 4755 scope.go:117] "RemoveContainer" containerID="0a3bad00098562ade0fcf5771bbbdb5b5bf9f5b043ae4aa0486f7624ecaae205" Nov 24 01:39:02 crc kubenswrapper[4755]: I1124 01:39:02.886717 4755 scope.go:117] "RemoveContainer" containerID="f4e9d94245f0b6bfce6f0573d63c4c2158e63641879326bbec6c270585755e62" Nov 24 01:39:02 crc kubenswrapper[4755]: I1124 01:39:02.911276 4755 scope.go:117] "RemoveContainer" containerID="a3363271518c94ebc7932b074033ddf197971f75fd4a1b47f2b54f0ded3e7475" Nov 24 01:39:02 crc kubenswrapper[4755]: I1124 01:39:02.935574 4755 scope.go:117] "RemoveContainer" containerID="2e32b437a14ce3216d17cf1623a3a94175e633dc2760e42eb5347b6ae5e35ae7" Nov 24 01:39:08 crc kubenswrapper[4755]: I1124 01:39:08.027006 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-lb2nq"] Nov 24 01:39:08 crc kubenswrapper[4755]: I1124 01:39:08.035272 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-lb2nq"] Nov 24 01:39:08 crc kubenswrapper[4755]: I1124 01:39:08.043505 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-tgz4r"] Nov 24 01:39:08 crc kubenswrapper[4755]: I1124 01:39:08.050945 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-tgz4r"] Nov 24 01:39:10 crc kubenswrapper[4755]: I1124 01:39:10.013511 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ebd8073d-4cda-4515-8999-cfdb8a4fcc7f" path="/var/lib/kubelet/pods/ebd8073d-4cda-4515-8999-cfdb8a4fcc7f/volumes" Nov 24 01:39:10 crc kubenswrapper[4755]: I1124 01:39:10.015078 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec11ae96-46e1-47a2-ae19-61941253ce7c" path="/var/lib/kubelet/pods/ec11ae96-46e1-47a2-ae19-61941253ce7c/volumes" Nov 24 01:39:10 crc kubenswrapper[4755]: I1124 01:39:10.996869 4755 scope.go:117] "RemoveContainer" containerID="8c75477967bfd17f465845e9ad9d8e30c5874427ba92c3a82e0868aae5f07ae3" Nov 24 01:39:10 crc kubenswrapper[4755]: E1124 01:39:10.997277 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:39:11 crc kubenswrapper[4755]: I1124 01:39:11.029586 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-srjbc"] Nov 24 01:39:11 crc kubenswrapper[4755]: I1124 01:39:11.039963 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-srjbc"] Nov 24 01:39:12 crc kubenswrapper[4755]: I1124 01:39:12.012983 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2bed5952-1a88-4314-befd-bb76c5431cdd" path="/var/lib/kubelet/pods/2bed5952-1a88-4314-befd-bb76c5431cdd/volumes" Nov 24 01:39:23 crc kubenswrapper[4755]: I1124 01:39:23.997772 4755 scope.go:117] "RemoveContainer" containerID="8c75477967bfd17f465845e9ad9d8e30c5874427ba92c3a82e0868aae5f07ae3" Nov 24 01:39:24 crc 
kubenswrapper[4755]: E1124 01:39:24.000976 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:39:24 crc kubenswrapper[4755]: I1124 01:39:24.051035 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-rhwbp"] Nov 24 01:39:24 crc kubenswrapper[4755]: I1124 01:39:24.059210 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-rhwbp"] Nov 24 01:39:26 crc kubenswrapper[4755]: I1124 01:39:26.007190 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aea62103-9b85-495d-bb71-3c69c02a3000" path="/var/lib/kubelet/pods/aea62103-9b85-495d-bb71-3c69c02a3000/volumes" Nov 24 01:39:35 crc kubenswrapper[4755]: I1124 01:39:35.175226 4755 generic.go:334] "Generic (PLEG): container finished" podID="f823da2a-bba1-4b6e-9504-e03ec6a3b94f" containerID="4e3a0ea280d9ad86e49a7eaac16b8eaa1e457cbe143ef649fe212d3208b39da7" exitCode=0 Nov 24 01:39:35 crc kubenswrapper[4755]: I1124 01:39:35.175334 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-czxxj" event={"ID":"f823da2a-bba1-4b6e-9504-e03ec6a3b94f","Type":"ContainerDied","Data":"4e3a0ea280d9ad86e49a7eaac16b8eaa1e457cbe143ef649fe212d3208b39da7"} Nov 24 01:39:36 crc kubenswrapper[4755]: I1124 01:39:36.581080 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-czxxj" Nov 24 01:39:36 crc kubenswrapper[4755]: I1124 01:39:36.623002 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f823da2a-bba1-4b6e-9504-e03ec6a3b94f-inventory\") pod \"f823da2a-bba1-4b6e-9504-e03ec6a3b94f\" (UID: \"f823da2a-bba1-4b6e-9504-e03ec6a3b94f\") " Nov 24 01:39:36 crc kubenswrapper[4755]: I1124 01:39:36.623260 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f823da2a-bba1-4b6e-9504-e03ec6a3b94f-ssh-key\") pod \"f823da2a-bba1-4b6e-9504-e03ec6a3b94f\" (UID: \"f823da2a-bba1-4b6e-9504-e03ec6a3b94f\") " Nov 24 01:39:36 crc kubenswrapper[4755]: I1124 01:39:36.623301 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vcbtz\" (UniqueName: \"kubernetes.io/projected/f823da2a-bba1-4b6e-9504-e03ec6a3b94f-kube-api-access-vcbtz\") pod \"f823da2a-bba1-4b6e-9504-e03ec6a3b94f\" (UID: \"f823da2a-bba1-4b6e-9504-e03ec6a3b94f\") " Nov 24 01:39:36 crc kubenswrapper[4755]: I1124 01:39:36.629955 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f823da2a-bba1-4b6e-9504-e03ec6a3b94f-kube-api-access-vcbtz" (OuterVolumeSpecName: "kube-api-access-vcbtz") pod "f823da2a-bba1-4b6e-9504-e03ec6a3b94f" (UID: "f823da2a-bba1-4b6e-9504-e03ec6a3b94f"). InnerVolumeSpecName "kube-api-access-vcbtz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:39:36 crc kubenswrapper[4755]: I1124 01:39:36.651911 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f823da2a-bba1-4b6e-9504-e03ec6a3b94f-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f823da2a-bba1-4b6e-9504-e03ec6a3b94f" (UID: "f823da2a-bba1-4b6e-9504-e03ec6a3b94f"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:39:36 crc kubenswrapper[4755]: I1124 01:39:36.656285 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f823da2a-bba1-4b6e-9504-e03ec6a3b94f-inventory" (OuterVolumeSpecName: "inventory") pod "f823da2a-bba1-4b6e-9504-e03ec6a3b94f" (UID: "f823da2a-bba1-4b6e-9504-e03ec6a3b94f"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:39:36 crc kubenswrapper[4755]: I1124 01:39:36.725753 4755 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f823da2a-bba1-4b6e-9504-e03ec6a3b94f-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 24 01:39:36 crc kubenswrapper[4755]: I1124 01:39:36.725795 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vcbtz\" (UniqueName: \"kubernetes.io/projected/f823da2a-bba1-4b6e-9504-e03ec6a3b94f-kube-api-access-vcbtz\") on node \"crc\" DevicePath \"\"" Nov 24 01:39:36 crc kubenswrapper[4755]: I1124 01:39:36.725817 4755 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f823da2a-bba1-4b6e-9504-e03ec6a3b94f-inventory\") on node \"crc\" DevicePath \"\"" Nov 24 01:39:37 crc kubenswrapper[4755]: I1124 01:39:37.198341 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-czxxj" event={"ID":"f823da2a-bba1-4b6e-9504-e03ec6a3b94f","Type":"ContainerDied","Data":"4d66f8e4d570e5055b71d0cf63da894b3d6bb72a6ab0f9ffe90c54c8980d3014"} Nov 24 01:39:37 crc kubenswrapper[4755]: I1124 01:39:37.198383 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4d66f8e4d570e5055b71d0cf63da894b3d6bb72a6ab0f9ffe90c54c8980d3014" Nov 24 01:39:37 crc kubenswrapper[4755]: I1124 01:39:37.198446 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-czxxj" Nov 24 01:39:37 crc kubenswrapper[4755]: I1124 01:39:37.286972 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-phn9q"] Nov 24 01:39:37 crc kubenswrapper[4755]: E1124 01:39:37.287430 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f823da2a-bba1-4b6e-9504-e03ec6a3b94f" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Nov 24 01:39:37 crc kubenswrapper[4755]: I1124 01:39:37.287453 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="f823da2a-bba1-4b6e-9504-e03ec6a3b94f" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Nov 24 01:39:37 crc kubenswrapper[4755]: I1124 01:39:37.287709 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="f823da2a-bba1-4b6e-9504-e03ec6a3b94f" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Nov 24 01:39:37 crc kubenswrapper[4755]: I1124 01:39:37.288431 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-phn9q" Nov 24 01:39:37 crc kubenswrapper[4755]: I1124 01:39:37.292326 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 24 01:39:37 crc kubenswrapper[4755]: I1124 01:39:37.292375 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Nov 24 01:39:37 crc kubenswrapper[4755]: I1124 01:39:37.292434 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Nov 24 01:39:37 crc kubenswrapper[4755]: I1124 01:39:37.292809 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-4dz5v" Nov 24 01:39:37 crc kubenswrapper[4755]: I1124 01:39:37.326544 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-phn9q"] Nov 24 01:39:37 crc kubenswrapper[4755]: I1124 01:39:37.335002 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/dac685c9-1650-4372-9f79-0c359d3169eb-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-phn9q\" (UID: \"dac685c9-1650-4372-9f79-0c359d3169eb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-phn9q" Nov 24 01:39:37 crc kubenswrapper[4755]: I1124 01:39:37.335130 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d84pq\" (UniqueName: \"kubernetes.io/projected/dac685c9-1650-4372-9f79-0c359d3169eb-kube-api-access-d84pq\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-phn9q\" (UID: \"dac685c9-1650-4372-9f79-0c359d3169eb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-phn9q" Nov 24 01:39:37 crc kubenswrapper[4755]: I1124 01:39:37.335267 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dac685c9-1650-4372-9f79-0c359d3169eb-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-phn9q\" (UID: \"dac685c9-1650-4372-9f79-0c359d3169eb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-phn9q" Nov 24 01:39:37 crc kubenswrapper[4755]: I1124 01:39:37.437475 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/dac685c9-1650-4372-9f79-0c359d3169eb-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-phn9q\" (UID: \"dac685c9-1650-4372-9f79-0c359d3169eb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-phn9q" Nov 24 01:39:37 crc kubenswrapper[4755]: I1124 01:39:37.437721 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d84pq\" (UniqueName: \"kubernetes.io/projected/dac685c9-1650-4372-9f79-0c359d3169eb-kube-api-access-d84pq\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-phn9q\" (UID: \"dac685c9-1650-4372-9f79-0c359d3169eb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-phn9q" Nov 24 01:39:37 crc kubenswrapper[4755]: I1124 01:39:37.437847 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dac685c9-1650-4372-9f79-0c359d3169eb-inventory\") pod 
\"validate-network-edpm-deployment-openstack-edpm-ipam-phn9q\" (UID: \"dac685c9-1650-4372-9f79-0c359d3169eb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-phn9q" Nov 24 01:39:37 crc kubenswrapper[4755]: I1124 01:39:37.442087 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/dac685c9-1650-4372-9f79-0c359d3169eb-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-phn9q\" (UID: \"dac685c9-1650-4372-9f79-0c359d3169eb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-phn9q" Nov 24 01:39:37 crc kubenswrapper[4755]: I1124 01:39:37.443549 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dac685c9-1650-4372-9f79-0c359d3169eb-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-phn9q\" (UID: \"dac685c9-1650-4372-9f79-0c359d3169eb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-phn9q" Nov 24 01:39:37 crc kubenswrapper[4755]: I1124 01:39:37.460016 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d84pq\" (UniqueName: \"kubernetes.io/projected/dac685c9-1650-4372-9f79-0c359d3169eb-kube-api-access-d84pq\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-phn9q\" (UID: \"dac685c9-1650-4372-9f79-0c359d3169eb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-phn9q" Nov 24 01:39:37 crc kubenswrapper[4755]: I1124 01:39:37.610226 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-phn9q" Nov 24 01:39:37 crc kubenswrapper[4755]: I1124 01:39:37.997296 4755 scope.go:117] "RemoveContainer" containerID="8c75477967bfd17f465845e9ad9d8e30c5874427ba92c3a82e0868aae5f07ae3" Nov 24 01:39:37 crc kubenswrapper[4755]: E1124 01:39:37.997854 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:39:38 crc kubenswrapper[4755]: I1124 01:39:38.139305 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-phn9q"] Nov 24 01:39:38 crc kubenswrapper[4755]: W1124 01:39:38.142033 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddac685c9_1650_4372_9f79_0c359d3169eb.slice/crio-fb1caf39ee0ac01a4a109f0941713dd90d38d92baaa9b215677765a8da8f0492 WatchSource:0}: Error finding container fb1caf39ee0ac01a4a109f0941713dd90d38d92baaa9b215677765a8da8f0492: Status 404 returned error can't find the container with id fb1caf39ee0ac01a4a109f0941713dd90d38d92baaa9b215677765a8da8f0492 Nov 24 01:39:38 crc kubenswrapper[4755]: I1124 01:39:38.208458 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-phn9q" event={"ID":"dac685c9-1650-4372-9f79-0c359d3169eb","Type":"ContainerStarted","Data":"fb1caf39ee0ac01a4a109f0941713dd90d38d92baaa9b215677765a8da8f0492"} Nov 24 01:39:39 crc kubenswrapper[4755]: I1124 01:39:39.229233 4755 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-phn9q" event={"ID":"dac685c9-1650-4372-9f79-0c359d3169eb","Type":"ContainerStarted","Data":"2bda82fa341e742fd03a9908e8f648bd6492de95ed83d8df50132360ee30746b"} Nov 24 01:39:39 crc kubenswrapper[4755]: I1124 01:39:39.254272 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-phn9q" podStartSLOduration=1.6976406640000001 podStartE2EDuration="2.254250602s" podCreationTimestamp="2025-11-24 01:39:37 +0000 UTC" firstStartedPulling="2025-11-24 01:39:38.1490629 +0000 UTC m=+1602.835128401" lastFinishedPulling="2025-11-24 01:39:38.705672828 +0000 UTC m=+1603.391738339" observedRunningTime="2025-11-24 01:39:39.249886351 +0000 UTC m=+1603.935951872" watchObservedRunningTime="2025-11-24 01:39:39.254250602 +0000 UTC m=+1603.940316123" Nov 24 01:39:41 crc kubenswrapper[4755]: I1124 01:39:41.040768 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-d9gwb"] Nov 24 01:39:41 crc kubenswrapper[4755]: I1124 01:39:41.042781 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-d9gwb" Nov 24 01:39:41 crc kubenswrapper[4755]: I1124 01:39:41.054647 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-d9gwb"] Nov 24 01:39:41 crc kubenswrapper[4755]: I1124 01:39:41.104206 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mzxhs\" (UniqueName: \"kubernetes.io/projected/2c872ecc-6475-46b9-9d9d-41280873ea94-kube-api-access-mzxhs\") pod \"community-operators-d9gwb\" (UID: \"2c872ecc-6475-46b9-9d9d-41280873ea94\") " pod="openshift-marketplace/community-operators-d9gwb" Nov 24 01:39:41 crc kubenswrapper[4755]: I1124 01:39:41.104320 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c872ecc-6475-46b9-9d9d-41280873ea94-catalog-content\") pod \"community-operators-d9gwb\" (UID: \"2c872ecc-6475-46b9-9d9d-41280873ea94\") " pod="openshift-marketplace/community-operators-d9gwb" Nov 24 01:39:41 crc kubenswrapper[4755]: I1124 01:39:41.104377 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c872ecc-6475-46b9-9d9d-41280873ea94-utilities\") pod \"community-operators-d9gwb\" (UID: \"2c872ecc-6475-46b9-9d9d-41280873ea94\") " pod="openshift-marketplace/community-operators-d9gwb" Nov 24 01:39:41 crc kubenswrapper[4755]: I1124 01:39:41.206121 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mzxhs\" (UniqueName: \"kubernetes.io/projected/2c872ecc-6475-46b9-9d9d-41280873ea94-kube-api-access-mzxhs\") pod \"community-operators-d9gwb\" (UID: \"2c872ecc-6475-46b9-9d9d-41280873ea94\") " pod="openshift-marketplace/community-operators-d9gwb" Nov 24 01:39:41 crc kubenswrapper[4755]: I1124 01:39:41.206179 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c872ecc-6475-46b9-9d9d-41280873ea94-catalog-content\") pod \"community-operators-d9gwb\" (UID: \"2c872ecc-6475-46b9-9d9d-41280873ea94\") " pod="openshift-marketplace/community-operators-d9gwb" Nov 24 01:39:41 crc kubenswrapper[4755]: 
I1124 01:39:41.206208 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c872ecc-6475-46b9-9d9d-41280873ea94-utilities\") pod \"community-operators-d9gwb\" (UID: \"2c872ecc-6475-46b9-9d9d-41280873ea94\") " pod="openshift-marketplace/community-operators-d9gwb" Nov 24 01:39:41 crc kubenswrapper[4755]: I1124 01:39:41.206885 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c872ecc-6475-46b9-9d9d-41280873ea94-utilities\") pod \"community-operators-d9gwb\" (UID: \"2c872ecc-6475-46b9-9d9d-41280873ea94\") " pod="openshift-marketplace/community-operators-d9gwb" Nov 24 01:39:41 crc kubenswrapper[4755]: I1124 01:39:41.206982 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c872ecc-6475-46b9-9d9d-41280873ea94-catalog-content\") pod \"community-operators-d9gwb\" (UID: \"2c872ecc-6475-46b9-9d9d-41280873ea94\") " pod="openshift-marketplace/community-operators-d9gwb" Nov 24 01:39:41 crc kubenswrapper[4755]: I1124 01:39:41.226467 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mzxhs\" (UniqueName: \"kubernetes.io/projected/2c872ecc-6475-46b9-9d9d-41280873ea94-kube-api-access-mzxhs\") pod \"community-operators-d9gwb\" (UID: \"2c872ecc-6475-46b9-9d9d-41280873ea94\") " pod="openshift-marketplace/community-operators-d9gwb" Nov 24 01:39:41 crc kubenswrapper[4755]: I1124 01:39:41.395010 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-d9gwb" Nov 24 01:39:42 crc kubenswrapper[4755]: I1124 01:39:42.055235 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-d9gwb"] Nov 24 01:39:42 crc kubenswrapper[4755]: I1124 01:39:42.266560 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d9gwb" event={"ID":"2c872ecc-6475-46b9-9d9d-41280873ea94","Type":"ContainerStarted","Data":"bde9ef28ef3bd0e3c5e6321a4e9a15fe7a65e5d844df196c62e66d522b6bb6ac"} Nov 24 01:39:43 crc kubenswrapper[4755]: I1124 01:39:43.278571 4755 generic.go:334] "Generic (PLEG): container finished" podID="2c872ecc-6475-46b9-9d9d-41280873ea94" containerID="6a3df2775eb08e24bca7ba18b6771949c24d97d3b3e3de23e8cd81edcbb83b44" exitCode=0 Nov 24 01:39:43 crc kubenswrapper[4755]: I1124 01:39:43.278775 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d9gwb" event={"ID":"2c872ecc-6475-46b9-9d9d-41280873ea94","Type":"ContainerDied","Data":"6a3df2775eb08e24bca7ba18b6771949c24d97d3b3e3de23e8cd81edcbb83b44"} Nov 24 01:39:44 crc kubenswrapper[4755]: I1124 01:39:44.288237 4755 generic.go:334] "Generic (PLEG): container finished" podID="dac685c9-1650-4372-9f79-0c359d3169eb" containerID="2bda82fa341e742fd03a9908e8f648bd6492de95ed83d8df50132360ee30746b" exitCode=0 Nov 24 01:39:44 crc kubenswrapper[4755]: I1124 01:39:44.288331 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-phn9q" event={"ID":"dac685c9-1650-4372-9f79-0c359d3169eb","Type":"ContainerDied","Data":"2bda82fa341e742fd03a9908e8f648bd6492de95ed83d8df50132360ee30746b"} Nov 24 01:39:45 crc kubenswrapper[4755]: I1124 01:39:45.304345 4755 generic.go:334] "Generic (PLEG): container finished" podID="2c872ecc-6475-46b9-9d9d-41280873ea94" 
containerID="9923196544d1a822fb488fbab669752ae3a4bb7c1332b366370fe281474dc307" exitCode=0 Nov 24 01:39:45 crc kubenswrapper[4755]: I1124 01:39:45.304476 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d9gwb" event={"ID":"2c872ecc-6475-46b9-9d9d-41280873ea94","Type":"ContainerDied","Data":"9923196544d1a822fb488fbab669752ae3a4bb7c1332b366370fe281474dc307"} Nov 24 01:39:45 crc kubenswrapper[4755]: I1124 01:39:45.750231 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-phn9q" Nov 24 01:39:45 crc kubenswrapper[4755]: I1124 01:39:45.811464 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d84pq\" (UniqueName: \"kubernetes.io/projected/dac685c9-1650-4372-9f79-0c359d3169eb-kube-api-access-d84pq\") pod \"dac685c9-1650-4372-9f79-0c359d3169eb\" (UID: \"dac685c9-1650-4372-9f79-0c359d3169eb\") " Nov 24 01:39:45 crc kubenswrapper[4755]: I1124 01:39:45.811517 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/dac685c9-1650-4372-9f79-0c359d3169eb-ssh-key\") pod \"dac685c9-1650-4372-9f79-0c359d3169eb\" (UID: \"dac685c9-1650-4372-9f79-0c359d3169eb\") " Nov 24 01:39:45 crc kubenswrapper[4755]: I1124 01:39:45.811645 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dac685c9-1650-4372-9f79-0c359d3169eb-inventory\") pod \"dac685c9-1650-4372-9f79-0c359d3169eb\" (UID: \"dac685c9-1650-4372-9f79-0c359d3169eb\") " Nov 24 01:39:45 crc kubenswrapper[4755]: I1124 01:39:45.818074 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dac685c9-1650-4372-9f79-0c359d3169eb-kube-api-access-d84pq" (OuterVolumeSpecName: "kube-api-access-d84pq") pod "dac685c9-1650-4372-9f79-0c359d3169eb" (UID: "dac685c9-1650-4372-9f79-0c359d3169eb"). InnerVolumeSpecName "kube-api-access-d84pq". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:39:45 crc kubenswrapper[4755]: I1124 01:39:45.855766 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dac685c9-1650-4372-9f79-0c359d3169eb-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "dac685c9-1650-4372-9f79-0c359d3169eb" (UID: "dac685c9-1650-4372-9f79-0c359d3169eb"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:39:45 crc kubenswrapper[4755]: I1124 01:39:45.857710 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dac685c9-1650-4372-9f79-0c359d3169eb-inventory" (OuterVolumeSpecName: "inventory") pod "dac685c9-1650-4372-9f79-0c359d3169eb" (UID: "dac685c9-1650-4372-9f79-0c359d3169eb"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:39:45 crc kubenswrapper[4755]: I1124 01:39:45.914431 4755 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/dac685c9-1650-4372-9f79-0c359d3169eb-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 24 01:39:45 crc kubenswrapper[4755]: I1124 01:39:45.914465 4755 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dac685c9-1650-4372-9f79-0c359d3169eb-inventory\") on node \"crc\" DevicePath \"\"" Nov 24 01:39:45 crc kubenswrapper[4755]: I1124 01:39:45.914477 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d84pq\" (UniqueName: \"kubernetes.io/projected/dac685c9-1650-4372-9f79-0c359d3169eb-kube-api-access-d84pq\") on node \"crc\" DevicePath \"\"" Nov 24 01:39:46 crc kubenswrapper[4755]: I1124 01:39:46.316896 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d9gwb" event={"ID":"2c872ecc-6475-46b9-9d9d-41280873ea94","Type":"ContainerStarted","Data":"22e178f88fa4defef17f238a86515a13bc089041c0f1b36af271c2346f3cbe3b"} Nov 24 01:39:46 crc kubenswrapper[4755]: I1124 01:39:46.319785 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-phn9q" event={"ID":"dac685c9-1650-4372-9f79-0c359d3169eb","Type":"ContainerDied","Data":"fb1caf39ee0ac01a4a109f0941713dd90d38d92baaa9b215677765a8da8f0492"} Nov 24 01:39:46 crc kubenswrapper[4755]: I1124 01:39:46.319820 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fb1caf39ee0ac01a4a109f0941713dd90d38d92baaa9b215677765a8da8f0492" Nov 24 01:39:46 crc kubenswrapper[4755]: I1124 01:39:46.319866 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-phn9q" Nov 24 01:39:46 crc kubenswrapper[4755]: I1124 01:39:46.363641 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-d9gwb" podStartSLOduration=2.942695949 podStartE2EDuration="5.363623442s" podCreationTimestamp="2025-11-24 01:39:41 +0000 UTC" firstStartedPulling="2025-11-24 01:39:43.280881517 +0000 UTC m=+1607.966947028" lastFinishedPulling="2025-11-24 01:39:45.70180902 +0000 UTC m=+1610.387874521" observedRunningTime="2025-11-24 01:39:46.363404836 +0000 UTC m=+1611.049470337" watchObservedRunningTime="2025-11-24 01:39:46.363623442 +0000 UTC m=+1611.049688943" Nov 24 01:39:46 crc kubenswrapper[4755]: I1124 01:39:46.439819 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-v9bk9"] Nov 24 01:39:46 crc kubenswrapper[4755]: E1124 01:39:46.440199 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dac685c9-1650-4372-9f79-0c359d3169eb" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Nov 24 01:39:46 crc kubenswrapper[4755]: I1124 01:39:46.440216 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="dac685c9-1650-4372-9f79-0c359d3169eb" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Nov 24 01:39:46 crc kubenswrapper[4755]: I1124 01:39:46.440432 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="dac685c9-1650-4372-9f79-0c359d3169eb" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Nov 24 01:39:46 crc kubenswrapper[4755]: I1124 01:39:46.441053 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v9bk9" Nov 24 01:39:46 crc kubenswrapper[4755]: I1124 01:39:46.446380 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-4dz5v" Nov 24 01:39:46 crc kubenswrapper[4755]: I1124 01:39:46.446737 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 24 01:39:46 crc kubenswrapper[4755]: I1124 01:39:46.447059 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Nov 24 01:39:46 crc kubenswrapper[4755]: I1124 01:39:46.463432 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Nov 24 01:39:46 crc kubenswrapper[4755]: I1124 01:39:46.477172 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-v9bk9"] Nov 24 01:39:46 crc kubenswrapper[4755]: I1124 01:39:46.528042 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5c9pc\" (UniqueName: \"kubernetes.io/projected/2b4a98ef-0655-4257-be72-766516d54fc4-kube-api-access-5c9pc\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-v9bk9\" (UID: \"2b4a98ef-0655-4257-be72-766516d54fc4\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v9bk9" Nov 24 01:39:46 crc kubenswrapper[4755]: I1124 01:39:46.528217 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2b4a98ef-0655-4257-be72-766516d54fc4-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-v9bk9\" (UID: 
\"2b4a98ef-0655-4257-be72-766516d54fc4\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v9bk9" Nov 24 01:39:46 crc kubenswrapper[4755]: I1124 01:39:46.528319 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2b4a98ef-0655-4257-be72-766516d54fc4-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-v9bk9\" (UID: \"2b4a98ef-0655-4257-be72-766516d54fc4\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v9bk9" Nov 24 01:39:46 crc kubenswrapper[4755]: I1124 01:39:46.629885 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5c9pc\" (UniqueName: \"kubernetes.io/projected/2b4a98ef-0655-4257-be72-766516d54fc4-kube-api-access-5c9pc\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-v9bk9\" (UID: \"2b4a98ef-0655-4257-be72-766516d54fc4\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v9bk9" Nov 24 01:39:46 crc kubenswrapper[4755]: I1124 01:39:46.630144 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2b4a98ef-0655-4257-be72-766516d54fc4-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-v9bk9\" (UID: \"2b4a98ef-0655-4257-be72-766516d54fc4\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v9bk9" Nov 24 01:39:46 crc kubenswrapper[4755]: I1124 01:39:46.630275 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2b4a98ef-0655-4257-be72-766516d54fc4-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-v9bk9\" (UID: \"2b4a98ef-0655-4257-be72-766516d54fc4\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v9bk9" Nov 24 01:39:46 crc kubenswrapper[4755]: I1124 01:39:46.635230 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2b4a98ef-0655-4257-be72-766516d54fc4-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-v9bk9\" (UID: \"2b4a98ef-0655-4257-be72-766516d54fc4\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v9bk9" Nov 24 01:39:46 crc kubenswrapper[4755]: I1124 01:39:46.635274 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2b4a98ef-0655-4257-be72-766516d54fc4-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-v9bk9\" (UID: \"2b4a98ef-0655-4257-be72-766516d54fc4\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v9bk9" Nov 24 01:39:46 crc kubenswrapper[4755]: I1124 01:39:46.646457 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5c9pc\" (UniqueName: \"kubernetes.io/projected/2b4a98ef-0655-4257-be72-766516d54fc4-kube-api-access-5c9pc\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-v9bk9\" (UID: \"2b4a98ef-0655-4257-be72-766516d54fc4\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v9bk9" Nov 24 01:39:46 crc kubenswrapper[4755]: I1124 01:39:46.768484 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v9bk9" Nov 24 01:39:47 crc kubenswrapper[4755]: I1124 01:39:47.322580 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-v9bk9"] Nov 24 01:39:48 crc kubenswrapper[4755]: I1124 01:39:48.339996 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v9bk9" event={"ID":"2b4a98ef-0655-4257-be72-766516d54fc4","Type":"ContainerStarted","Data":"f565643976d82e53d32de728f756c5a52bc9c9006dba99c07d9575cbc3e39cd2"} Nov 24 01:39:48 crc kubenswrapper[4755]: I1124 01:39:48.340328 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v9bk9" event={"ID":"2b4a98ef-0655-4257-be72-766516d54fc4","Type":"ContainerStarted","Data":"b8dbd122d1ef31b9a66733ba8d76b2572757a52fd0800009bfcf83e5446c927e"} Nov 24 01:39:48 crc kubenswrapper[4755]: I1124 01:39:48.364831 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v9bk9" podStartSLOduration=1.951346913 podStartE2EDuration="2.364811182s" podCreationTimestamp="2025-11-24 01:39:46 +0000 UTC" firstStartedPulling="2025-11-24 01:39:47.328840934 +0000 UTC m=+1612.014906435" lastFinishedPulling="2025-11-24 01:39:47.742305213 +0000 UTC m=+1612.428370704" observedRunningTime="2025-11-24 01:39:48.354735352 +0000 UTC m=+1613.040800853" watchObservedRunningTime="2025-11-24 01:39:48.364811182 +0000 UTC m=+1613.050876693" Nov 24 01:39:50 crc kubenswrapper[4755]: I1124 01:39:50.996299 4755 scope.go:117] "RemoveContainer" containerID="8c75477967bfd17f465845e9ad9d8e30c5874427ba92c3a82e0868aae5f07ae3" Nov 24 01:39:50 crc kubenswrapper[4755]: E1124 01:39:50.996833 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:39:51 crc kubenswrapper[4755]: I1124 01:39:51.395998 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-d9gwb" Nov 24 01:39:51 crc kubenswrapper[4755]: I1124 01:39:51.396338 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-d9gwb" Nov 24 01:39:51 crc kubenswrapper[4755]: I1124 01:39:51.482896 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-d9gwb" Nov 24 01:39:52 crc kubenswrapper[4755]: I1124 01:39:52.448671 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-d9gwb" Nov 24 01:39:52 crc kubenswrapper[4755]: I1124 01:39:52.501181 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-d9gwb"] Nov 24 01:39:54 crc kubenswrapper[4755]: I1124 01:39:54.389820 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-d9gwb" podUID="2c872ecc-6475-46b9-9d9d-41280873ea94" containerName="registry-server" 
containerID="cri-o://22e178f88fa4defef17f238a86515a13bc089041c0f1b36af271c2346f3cbe3b" gracePeriod=2 Nov 24 01:39:54 crc kubenswrapper[4755]: I1124 01:39:54.836317 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-d9gwb" Nov 24 01:39:54 crc kubenswrapper[4755]: I1124 01:39:54.897230 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mzxhs\" (UniqueName: \"kubernetes.io/projected/2c872ecc-6475-46b9-9d9d-41280873ea94-kube-api-access-mzxhs\") pod \"2c872ecc-6475-46b9-9d9d-41280873ea94\" (UID: \"2c872ecc-6475-46b9-9d9d-41280873ea94\") " Nov 24 01:39:54 crc kubenswrapper[4755]: I1124 01:39:54.897354 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c872ecc-6475-46b9-9d9d-41280873ea94-utilities\") pod \"2c872ecc-6475-46b9-9d9d-41280873ea94\" (UID: \"2c872ecc-6475-46b9-9d9d-41280873ea94\") " Nov 24 01:39:54 crc kubenswrapper[4755]: I1124 01:39:54.897391 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c872ecc-6475-46b9-9d9d-41280873ea94-catalog-content\") pod \"2c872ecc-6475-46b9-9d9d-41280873ea94\" (UID: \"2c872ecc-6475-46b9-9d9d-41280873ea94\") " Nov 24 01:39:54 crc kubenswrapper[4755]: I1124 01:39:54.898309 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c872ecc-6475-46b9-9d9d-41280873ea94-utilities" (OuterVolumeSpecName: "utilities") pod "2c872ecc-6475-46b9-9d9d-41280873ea94" (UID: "2c872ecc-6475-46b9-9d9d-41280873ea94"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:39:54 crc kubenswrapper[4755]: I1124 01:39:54.902981 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c872ecc-6475-46b9-9d9d-41280873ea94-kube-api-access-mzxhs" (OuterVolumeSpecName: "kube-api-access-mzxhs") pod "2c872ecc-6475-46b9-9d9d-41280873ea94" (UID: "2c872ecc-6475-46b9-9d9d-41280873ea94"). InnerVolumeSpecName "kube-api-access-mzxhs". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:39:54 crc kubenswrapper[4755]: I1124 01:39:54.960963 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c872ecc-6475-46b9-9d9d-41280873ea94-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2c872ecc-6475-46b9-9d9d-41280873ea94" (UID: "2c872ecc-6475-46b9-9d9d-41280873ea94"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:39:54 crc kubenswrapper[4755]: I1124 01:39:54.999812 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mzxhs\" (UniqueName: \"kubernetes.io/projected/2c872ecc-6475-46b9-9d9d-41280873ea94-kube-api-access-mzxhs\") on node \"crc\" DevicePath \"\"" Nov 24 01:39:54 crc kubenswrapper[4755]: I1124 01:39:54.999844 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c872ecc-6475-46b9-9d9d-41280873ea94-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 01:39:54 crc kubenswrapper[4755]: I1124 01:39:54.999857 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c872ecc-6475-46b9-9d9d-41280873ea94-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 01:39:55 crc kubenswrapper[4755]: I1124 01:39:55.400735 4755 generic.go:334] "Generic (PLEG): container finished" podID="2c872ecc-6475-46b9-9d9d-41280873ea94" containerID="22e178f88fa4defef17f238a86515a13bc089041c0f1b36af271c2346f3cbe3b" exitCode=0 Nov 24 01:39:55 crc kubenswrapper[4755]: I1124 01:39:55.400780 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d9gwb" event={"ID":"2c872ecc-6475-46b9-9d9d-41280873ea94","Type":"ContainerDied","Data":"22e178f88fa4defef17f238a86515a13bc089041c0f1b36af271c2346f3cbe3b"} Nov 24 01:39:55 crc kubenswrapper[4755]: I1124 01:39:55.400836 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d9gwb" event={"ID":"2c872ecc-6475-46b9-9d9d-41280873ea94","Type":"ContainerDied","Data":"bde9ef28ef3bd0e3c5e6321a4e9a15fe7a65e5d844df196c62e66d522b6bb6ac"} Nov 24 01:39:55 crc kubenswrapper[4755]: I1124 01:39:55.400853 4755 scope.go:117] "RemoveContainer" containerID="22e178f88fa4defef17f238a86515a13bc089041c0f1b36af271c2346f3cbe3b" Nov 24 01:39:55 crc kubenswrapper[4755]: I1124 01:39:55.400961 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-d9gwb" Nov 24 01:39:55 crc kubenswrapper[4755]: I1124 01:39:55.452675 4755 scope.go:117] "RemoveContainer" containerID="9923196544d1a822fb488fbab669752ae3a4bb7c1332b366370fe281474dc307" Nov 24 01:39:55 crc kubenswrapper[4755]: I1124 01:39:55.464459 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-d9gwb"] Nov 24 01:39:55 crc kubenswrapper[4755]: I1124 01:39:55.473903 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-d9gwb"] Nov 24 01:39:55 crc kubenswrapper[4755]: I1124 01:39:55.479449 4755 scope.go:117] "RemoveContainer" containerID="6a3df2775eb08e24bca7ba18b6771949c24d97d3b3e3de23e8cd81edcbb83b44" Nov 24 01:39:55 crc kubenswrapper[4755]: I1124 01:39:55.519558 4755 scope.go:117] "RemoveContainer" containerID="22e178f88fa4defef17f238a86515a13bc089041c0f1b36af271c2346f3cbe3b" Nov 24 01:39:55 crc kubenswrapper[4755]: E1124 01:39:55.520123 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22e178f88fa4defef17f238a86515a13bc089041c0f1b36af271c2346f3cbe3b\": container with ID starting with 22e178f88fa4defef17f238a86515a13bc089041c0f1b36af271c2346f3cbe3b not found: ID does not exist" containerID="22e178f88fa4defef17f238a86515a13bc089041c0f1b36af271c2346f3cbe3b" Nov 24 01:39:55 crc kubenswrapper[4755]: I1124 01:39:55.520155 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22e178f88fa4defef17f238a86515a13bc089041c0f1b36af271c2346f3cbe3b"} err="failed to get container status \"22e178f88fa4defef17f238a86515a13bc089041c0f1b36af271c2346f3cbe3b\": rpc error: code = NotFound desc = could not find container \"22e178f88fa4defef17f238a86515a13bc089041c0f1b36af271c2346f3cbe3b\": container with ID starting with 22e178f88fa4defef17f238a86515a13bc089041c0f1b36af271c2346f3cbe3b not found: ID does not exist" Nov 24 01:39:55 crc kubenswrapper[4755]: I1124 01:39:55.520178 4755 scope.go:117] "RemoveContainer" containerID="9923196544d1a822fb488fbab669752ae3a4bb7c1332b366370fe281474dc307" Nov 24 01:39:55 crc kubenswrapper[4755]: E1124 01:39:55.520587 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9923196544d1a822fb488fbab669752ae3a4bb7c1332b366370fe281474dc307\": container with ID starting with 9923196544d1a822fb488fbab669752ae3a4bb7c1332b366370fe281474dc307 not found: ID does not exist" containerID="9923196544d1a822fb488fbab669752ae3a4bb7c1332b366370fe281474dc307" Nov 24 01:39:55 crc kubenswrapper[4755]: I1124 01:39:55.520646 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9923196544d1a822fb488fbab669752ae3a4bb7c1332b366370fe281474dc307"} err="failed to get container status \"9923196544d1a822fb488fbab669752ae3a4bb7c1332b366370fe281474dc307\": rpc error: code = NotFound desc = could not find container \"9923196544d1a822fb488fbab669752ae3a4bb7c1332b366370fe281474dc307\": container with ID starting with 9923196544d1a822fb488fbab669752ae3a4bb7c1332b366370fe281474dc307 not found: ID does not exist" Nov 24 01:39:55 crc kubenswrapper[4755]: I1124 01:39:55.520681 4755 scope.go:117] "RemoveContainer" containerID="6a3df2775eb08e24bca7ba18b6771949c24d97d3b3e3de23e8cd81edcbb83b44" Nov 24 01:39:55 crc kubenswrapper[4755]: E1124 01:39:55.520985 4755 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"6a3df2775eb08e24bca7ba18b6771949c24d97d3b3e3de23e8cd81edcbb83b44\": container with ID starting with 6a3df2775eb08e24bca7ba18b6771949c24d97d3b3e3de23e8cd81edcbb83b44 not found: ID does not exist" containerID="6a3df2775eb08e24bca7ba18b6771949c24d97d3b3e3de23e8cd81edcbb83b44" Nov 24 01:39:55 crc kubenswrapper[4755]: I1124 01:39:55.521007 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a3df2775eb08e24bca7ba18b6771949c24d97d3b3e3de23e8cd81edcbb83b44"} err="failed to get container status \"6a3df2775eb08e24bca7ba18b6771949c24d97d3b3e3de23e8cd81edcbb83b44\": rpc error: code = NotFound desc = could not find container \"6a3df2775eb08e24bca7ba18b6771949c24d97d3b3e3de23e8cd81edcbb83b44\": container with ID starting with 6a3df2775eb08e24bca7ba18b6771949c24d97d3b3e3de23e8cd81edcbb83b44 not found: ID does not exist" Nov 24 01:39:56 crc kubenswrapper[4755]: I1124 01:39:56.014238 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c872ecc-6475-46b9-9d9d-41280873ea94" path="/var/lib/kubelet/pods/2c872ecc-6475-46b9-9d9d-41280873ea94/volumes" Nov 24 01:39:59 crc kubenswrapper[4755]: I1124 01:39:59.045995 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-5vmsg"] Nov 24 01:39:59 crc kubenswrapper[4755]: I1124 01:39:59.052648 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-5vmsg"] Nov 24 01:40:00 crc kubenswrapper[4755]: I1124 01:40:00.012003 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd35021d-eaba-4748-b7b5-371fac220837" path="/var/lib/kubelet/pods/fd35021d-eaba-4748-b7b5-371fac220837/volumes" Nov 24 01:40:00 crc kubenswrapper[4755]: I1124 01:40:00.037266 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-497f-account-create-vxldj"] Nov 24 01:40:00 crc kubenswrapper[4755]: I1124 01:40:00.046688 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-d94f-account-create-jhmwf"] Nov 24 01:40:00 crc kubenswrapper[4755]: I1124 01:40:00.056914 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-497f-account-create-vxldj"] Nov 24 01:40:00 crc kubenswrapper[4755]: I1124 01:40:00.065463 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-d94f-account-create-jhmwf"] Nov 24 01:40:01 crc kubenswrapper[4755]: I1124 01:40:01.031324 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-b37d-account-create-72hxc"] Nov 24 01:40:01 crc kubenswrapper[4755]: I1124 01:40:01.040854 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-2brc6"] Nov 24 01:40:01 crc kubenswrapper[4755]: I1124 01:40:01.058208 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-frffp"] Nov 24 01:40:01 crc kubenswrapper[4755]: I1124 01:40:01.065320 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-b37d-account-create-72hxc"] Nov 24 01:40:01 crc kubenswrapper[4755]: I1124 01:40:01.072050 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-2brc6"] Nov 24 01:40:01 crc kubenswrapper[4755]: I1124 01:40:01.079396 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-frffp"] Nov 24 01:40:02 crc kubenswrapper[4755]: I1124 01:40:02.014350 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="87ee57db-6666-4ced-b558-accfa958ce55" path="/var/lib/kubelet/pods/87ee57db-6666-4ced-b558-accfa958ce55/volumes" Nov 24 01:40:02 crc kubenswrapper[4755]: I1124 01:40:02.015251 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc52419e-19f8-4cd9-a1ed-7286e096d6cf" path="/var/lib/kubelet/pods/bc52419e-19f8-4cd9-a1ed-7286e096d6cf/volumes" Nov 24 01:40:02 crc kubenswrapper[4755]: I1124 01:40:02.015979 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ddadc31c-aad5-4896-84da-4fbe82710d53" path="/var/lib/kubelet/pods/ddadc31c-aad5-4896-84da-4fbe82710d53/volumes" Nov 24 01:40:02 crc kubenswrapper[4755]: I1124 01:40:02.016793 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e60b8f75-376b-4ba1-9b41-5f334cec157f" path="/var/lib/kubelet/pods/e60b8f75-376b-4ba1-9b41-5f334cec157f/volumes" Nov 24 01:40:02 crc kubenswrapper[4755]: I1124 01:40:02.018185 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fdf82262-5b8f-4ee2-88b6-494e6f4d5b58" path="/var/lib/kubelet/pods/fdf82262-5b8f-4ee2-88b6-494e6f4d5b58/volumes" Nov 24 01:40:03 crc kubenswrapper[4755]: I1124 01:40:03.132021 4755 scope.go:117] "RemoveContainer" containerID="610fc7fd70375ef61dbd4e7e904aa258c8763b4d64281d0392a034af90136bc4" Nov 24 01:40:03 crc kubenswrapper[4755]: I1124 01:40:03.159442 4755 scope.go:117] "RemoveContainer" containerID="e32ed86fe959509af9beee44386b6725f66c514d040e0677c250079e6ca2540b" Nov 24 01:40:03 crc kubenswrapper[4755]: I1124 01:40:03.202885 4755 scope.go:117] "RemoveContainer" containerID="572ccff292dac1ea788cc9687596e62ec88e6173f6abdd705d35ae407f3de48e" Nov 24 01:40:03 crc kubenswrapper[4755]: I1124 01:40:03.259348 4755 scope.go:117] "RemoveContainer" containerID="f24356acf661f84dbe884261cd12ffa9933a5b203528afcfd35dca619f59fd09" Nov 24 01:40:03 crc kubenswrapper[4755]: I1124 01:40:03.291417 4755 scope.go:117] "RemoveContainer" containerID="8ec192c49adc214e823e98d164706848cc2640b6548607b86b281f0e3088b1f2" Nov 24 01:40:03 crc kubenswrapper[4755]: I1124 01:40:03.347443 4755 scope.go:117] "RemoveContainer" containerID="65e6aaee4313f1eabcd359c80695c88849bb98db23790e80c527eb075b29b021" Nov 24 01:40:03 crc kubenswrapper[4755]: I1124 01:40:03.411371 4755 scope.go:117] "RemoveContainer" containerID="18cd368a49b23980abf3dddf8124917b071d5644cb9750961e3bf44d114122f5" Nov 24 01:40:03 crc kubenswrapper[4755]: I1124 01:40:03.437307 4755 scope.go:117] "RemoveContainer" containerID="08bbc10523221ffa949c537018dab1ee193ae53e6d68b8d22ed837852488c548" Nov 24 01:40:03 crc kubenswrapper[4755]: I1124 01:40:03.463664 4755 scope.go:117] "RemoveContainer" containerID="eff9cc0342e0869618d644a3b0b2d906ff00fdc000c45039d9e76333aadbcc40" Nov 24 01:40:03 crc kubenswrapper[4755]: I1124 01:40:03.497139 4755 scope.go:117] "RemoveContainer" containerID="5165a6dd847c67e345aebca9c6fb54cf8252bf310676fad9aa6110a0b068459c" Nov 24 01:40:04 crc kubenswrapper[4755]: I1124 01:40:04.997386 4755 scope.go:117] "RemoveContainer" containerID="8c75477967bfd17f465845e9ad9d8e30c5874427ba92c3a82e0868aae5f07ae3" Nov 24 01:40:04 crc kubenswrapper[4755]: E1124 01:40:04.997899 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" 
podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:40:16 crc kubenswrapper[4755]: I1124 01:40:16.002805 4755 scope.go:117] "RemoveContainer" containerID="8c75477967bfd17f465845e9ad9d8e30c5874427ba92c3a82e0868aae5f07ae3" Nov 24 01:40:16 crc kubenswrapper[4755]: E1124 01:40:16.003538 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:40:26 crc kubenswrapper[4755]: I1124 01:40:26.996415 4755 scope.go:117] "RemoveContainer" containerID="8c75477967bfd17f465845e9ad9d8e30c5874427ba92c3a82e0868aae5f07ae3" Nov 24 01:40:26 crc kubenswrapper[4755]: E1124 01:40:26.997366 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:40:27 crc kubenswrapper[4755]: I1124 01:40:27.041075 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-hqx5z"] Nov 24 01:40:27 crc kubenswrapper[4755]: I1124 01:40:27.050739 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-hqx5z"] Nov 24 01:40:27 crc kubenswrapper[4755]: I1124 01:40:27.109536 4755 generic.go:334] "Generic (PLEG): container finished" podID="2b4a98ef-0655-4257-be72-766516d54fc4" containerID="f565643976d82e53d32de728f756c5a52bc9c9006dba99c07d9575cbc3e39cd2" exitCode=0 Nov 24 01:40:27 crc kubenswrapper[4755]: I1124 01:40:27.109571 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v9bk9" event={"ID":"2b4a98ef-0655-4257-be72-766516d54fc4","Type":"ContainerDied","Data":"f565643976d82e53d32de728f756c5a52bc9c9006dba99c07d9575cbc3e39cd2"} Nov 24 01:40:28 crc kubenswrapper[4755]: I1124 01:40:28.017898 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8965375e-ebca-4829-8445-54dabd02845f" path="/var/lib/kubelet/pods/8965375e-ebca-4829-8445-54dabd02845f/volumes" Nov 24 01:40:28 crc kubenswrapper[4755]: I1124 01:40:28.562234 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v9bk9" Nov 24 01:40:28 crc kubenswrapper[4755]: I1124 01:40:28.707328 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2b4a98ef-0655-4257-be72-766516d54fc4-inventory\") pod \"2b4a98ef-0655-4257-be72-766516d54fc4\" (UID: \"2b4a98ef-0655-4257-be72-766516d54fc4\") " Nov 24 01:40:28 crc kubenswrapper[4755]: I1124 01:40:28.707473 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2b4a98ef-0655-4257-be72-766516d54fc4-ssh-key\") pod \"2b4a98ef-0655-4257-be72-766516d54fc4\" (UID: \"2b4a98ef-0655-4257-be72-766516d54fc4\") " Nov 24 01:40:28 crc kubenswrapper[4755]: I1124 01:40:28.707639 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5c9pc\" (UniqueName: \"kubernetes.io/projected/2b4a98ef-0655-4257-be72-766516d54fc4-kube-api-access-5c9pc\") pod \"2b4a98ef-0655-4257-be72-766516d54fc4\" (UID: \"2b4a98ef-0655-4257-be72-766516d54fc4\") " Nov 24 01:40:28 crc kubenswrapper[4755]: I1124 01:40:28.713790 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b4a98ef-0655-4257-be72-766516d54fc4-kube-api-access-5c9pc" (OuterVolumeSpecName: "kube-api-access-5c9pc") pod "2b4a98ef-0655-4257-be72-766516d54fc4" (UID: "2b4a98ef-0655-4257-be72-766516d54fc4"). InnerVolumeSpecName "kube-api-access-5c9pc". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:40:28 crc kubenswrapper[4755]: I1124 01:40:28.735188 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b4a98ef-0655-4257-be72-766516d54fc4-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "2b4a98ef-0655-4257-be72-766516d54fc4" (UID: "2b4a98ef-0655-4257-be72-766516d54fc4"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:40:28 crc kubenswrapper[4755]: I1124 01:40:28.738336 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b4a98ef-0655-4257-be72-766516d54fc4-inventory" (OuterVolumeSpecName: "inventory") pod "2b4a98ef-0655-4257-be72-766516d54fc4" (UID: "2b4a98ef-0655-4257-be72-766516d54fc4"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:40:28 crc kubenswrapper[4755]: I1124 01:40:28.812323 4755 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2b4a98ef-0655-4257-be72-766516d54fc4-inventory\") on node \"crc\" DevicePath \"\"" Nov 24 01:40:28 crc kubenswrapper[4755]: I1124 01:40:28.812382 4755 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2b4a98ef-0655-4257-be72-766516d54fc4-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 24 01:40:28 crc kubenswrapper[4755]: I1124 01:40:28.812397 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5c9pc\" (UniqueName: \"kubernetes.io/projected/2b4a98ef-0655-4257-be72-766516d54fc4-kube-api-access-5c9pc\") on node \"crc\" DevicePath \"\"" Nov 24 01:40:29 crc kubenswrapper[4755]: I1124 01:40:29.127821 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v9bk9" event={"ID":"2b4a98ef-0655-4257-be72-766516d54fc4","Type":"ContainerDied","Data":"b8dbd122d1ef31b9a66733ba8d76b2572757a52fd0800009bfcf83e5446c927e"} Nov 24 01:40:29 crc kubenswrapper[4755]: I1124 01:40:29.127878 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b8dbd122d1ef31b9a66733ba8d76b2572757a52fd0800009bfcf83e5446c927e" Nov 24 01:40:29 crc kubenswrapper[4755]: I1124 01:40:29.127955 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v9bk9" Nov 24 01:40:29 crc kubenswrapper[4755]: I1124 01:40:29.260984 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xk9rb"] Nov 24 01:40:29 crc kubenswrapper[4755]: E1124 01:40:29.261661 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b4a98ef-0655-4257-be72-766516d54fc4" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Nov 24 01:40:29 crc kubenswrapper[4755]: I1124 01:40:29.261683 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b4a98ef-0655-4257-be72-766516d54fc4" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Nov 24 01:40:29 crc kubenswrapper[4755]: E1124 01:40:29.261701 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c872ecc-6475-46b9-9d9d-41280873ea94" containerName="extract-content" Nov 24 01:40:29 crc kubenswrapper[4755]: I1124 01:40:29.261708 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c872ecc-6475-46b9-9d9d-41280873ea94" containerName="extract-content" Nov 24 01:40:29 crc kubenswrapper[4755]: E1124 01:40:29.261719 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c872ecc-6475-46b9-9d9d-41280873ea94" containerName="registry-server" Nov 24 01:40:29 crc kubenswrapper[4755]: I1124 01:40:29.261725 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c872ecc-6475-46b9-9d9d-41280873ea94" containerName="registry-server" Nov 24 01:40:29 crc kubenswrapper[4755]: E1124 01:40:29.261737 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c872ecc-6475-46b9-9d9d-41280873ea94" containerName="extract-utilities" Nov 24 01:40:29 crc kubenswrapper[4755]: I1124 01:40:29.261742 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c872ecc-6475-46b9-9d9d-41280873ea94" containerName="extract-utilities" Nov 24 01:40:29 crc kubenswrapper[4755]: I1124 01:40:29.261907 4755 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="2b4a98ef-0655-4257-be72-766516d54fc4" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Nov 24 01:40:29 crc kubenswrapper[4755]: I1124 01:40:29.261925 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c872ecc-6475-46b9-9d9d-41280873ea94" containerName="registry-server" Nov 24 01:40:29 crc kubenswrapper[4755]: I1124 01:40:29.262509 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xk9rb" Nov 24 01:40:29 crc kubenswrapper[4755]: I1124 01:40:29.264355 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Nov 24 01:40:29 crc kubenswrapper[4755]: I1124 01:40:29.264537 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-4dz5v" Nov 24 01:40:29 crc kubenswrapper[4755]: I1124 01:40:29.265217 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Nov 24 01:40:29 crc kubenswrapper[4755]: I1124 01:40:29.267648 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 24 01:40:29 crc kubenswrapper[4755]: I1124 01:40:29.280402 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xk9rb"] Nov 24 01:40:29 crc kubenswrapper[4755]: I1124 01:40:29.426412 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4afec439-5744-46c0-a074-88c86ac07fbe-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-xk9rb\" (UID: \"4afec439-5744-46c0-a074-88c86ac07fbe\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xk9rb" Nov 24 01:40:29 crc kubenswrapper[4755]: I1124 01:40:29.426476 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hd9dp\" (UniqueName: \"kubernetes.io/projected/4afec439-5744-46c0-a074-88c86ac07fbe-kube-api-access-hd9dp\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-xk9rb\" (UID: \"4afec439-5744-46c0-a074-88c86ac07fbe\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xk9rb" Nov 24 01:40:29 crc kubenswrapper[4755]: I1124 01:40:29.426516 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4afec439-5744-46c0-a074-88c86ac07fbe-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-xk9rb\" (UID: \"4afec439-5744-46c0-a074-88c86ac07fbe\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xk9rb" Nov 24 01:40:29 crc kubenswrapper[4755]: I1124 01:40:29.528265 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4afec439-5744-46c0-a074-88c86ac07fbe-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-xk9rb\" (UID: \"4afec439-5744-46c0-a074-88c86ac07fbe\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xk9rb" Nov 24 01:40:29 crc kubenswrapper[4755]: I1124 01:40:29.528355 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hd9dp\" (UniqueName: \"kubernetes.io/projected/4afec439-5744-46c0-a074-88c86ac07fbe-kube-api-access-hd9dp\") pod 
\"configure-os-edpm-deployment-openstack-edpm-ipam-xk9rb\" (UID: \"4afec439-5744-46c0-a074-88c86ac07fbe\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xk9rb" Nov 24 01:40:29 crc kubenswrapper[4755]: I1124 01:40:29.528389 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4afec439-5744-46c0-a074-88c86ac07fbe-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-xk9rb\" (UID: \"4afec439-5744-46c0-a074-88c86ac07fbe\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xk9rb" Nov 24 01:40:29 crc kubenswrapper[4755]: I1124 01:40:29.534952 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4afec439-5744-46c0-a074-88c86ac07fbe-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-xk9rb\" (UID: \"4afec439-5744-46c0-a074-88c86ac07fbe\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xk9rb" Nov 24 01:40:29 crc kubenswrapper[4755]: I1124 01:40:29.542227 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4afec439-5744-46c0-a074-88c86ac07fbe-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-xk9rb\" (UID: \"4afec439-5744-46c0-a074-88c86ac07fbe\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xk9rb" Nov 24 01:40:29 crc kubenswrapper[4755]: I1124 01:40:29.545327 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hd9dp\" (UniqueName: \"kubernetes.io/projected/4afec439-5744-46c0-a074-88c86ac07fbe-kube-api-access-hd9dp\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-xk9rb\" (UID: \"4afec439-5744-46c0-a074-88c86ac07fbe\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xk9rb" Nov 24 01:40:29 crc kubenswrapper[4755]: I1124 01:40:29.589108 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xk9rb" Nov 24 01:40:30 crc kubenswrapper[4755]: I1124 01:40:30.089902 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xk9rb"] Nov 24 01:40:30 crc kubenswrapper[4755]: W1124 01:40:30.093822 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4afec439_5744_46c0_a074_88c86ac07fbe.slice/crio-4a22f491949f2d6bc6f0578638bb9373e8e385d827debb5666d2f74cbec5a876 WatchSource:0}: Error finding container 4a22f491949f2d6bc6f0578638bb9373e8e385d827debb5666d2f74cbec5a876: Status 404 returned error can't find the container with id 4a22f491949f2d6bc6f0578638bb9373e8e385d827debb5666d2f74cbec5a876 Nov 24 01:40:30 crc kubenswrapper[4755]: I1124 01:40:30.141762 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xk9rb" event={"ID":"4afec439-5744-46c0-a074-88c86ac07fbe","Type":"ContainerStarted","Data":"4a22f491949f2d6bc6f0578638bb9373e8e385d827debb5666d2f74cbec5a876"} Nov 24 01:40:31 crc kubenswrapper[4755]: I1124 01:40:31.155271 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xk9rb" event={"ID":"4afec439-5744-46c0-a074-88c86ac07fbe","Type":"ContainerStarted","Data":"e0342c906858a08109e3d139f887bd92802b8fed8f69928de14fa54eb526ef3f"} Nov 24 01:40:31 crc kubenswrapper[4755]: I1124 01:40:31.176637 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xk9rb" podStartSLOduration=1.638986376 podStartE2EDuration="2.176584975s" podCreationTimestamp="2025-11-24 01:40:29 +0000 UTC" firstStartedPulling="2025-11-24 01:40:30.097104296 +0000 UTC m=+1654.783169827" lastFinishedPulling="2025-11-24 01:40:30.634702925 +0000 UTC m=+1655.320768426" observedRunningTime="2025-11-24 01:40:31.17390928 +0000 UTC m=+1655.859974801" watchObservedRunningTime="2025-11-24 01:40:31.176584975 +0000 UTC m=+1655.862650516" Nov 24 01:40:41 crc kubenswrapper[4755]: I1124 01:40:41.996914 4755 scope.go:117] "RemoveContainer" containerID="8c75477967bfd17f465845e9ad9d8e30c5874427ba92c3a82e0868aae5f07ae3" Nov 24 01:40:41 crc kubenswrapper[4755]: E1124 01:40:41.997715 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:40:45 crc kubenswrapper[4755]: I1124 01:40:45.047435 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-xrqj7"] Nov 24 01:40:45 crc kubenswrapper[4755]: I1124 01:40:45.058172 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-hggks"] Nov 24 01:40:45 crc kubenswrapper[4755]: I1124 01:40:45.068235 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-xrqj7"] Nov 24 01:40:45 crc kubenswrapper[4755]: I1124 01:40:45.074471 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-hggks"] Nov 24 01:40:46 crc kubenswrapper[4755]: I1124 
01:40:46.010670 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2efb1e5a-1455-4b1e-892e-ee86d4fdf50b" path="/var/lib/kubelet/pods/2efb1e5a-1455-4b1e-892e-ee86d4fdf50b/volumes" Nov 24 01:40:46 crc kubenswrapper[4755]: I1124 01:40:46.011454 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c211ce7-93af-43de-ab63-044b93a27473" path="/var/lib/kubelet/pods/6c211ce7-93af-43de-ab63-044b93a27473/volumes" Nov 24 01:40:56 crc kubenswrapper[4755]: I1124 01:40:56.008658 4755 scope.go:117] "RemoveContainer" containerID="8c75477967bfd17f465845e9ad9d8e30c5874427ba92c3a82e0868aae5f07ae3" Nov 24 01:40:56 crc kubenswrapper[4755]: E1124 01:40:56.009504 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:41:03 crc kubenswrapper[4755]: I1124 01:41:03.716466 4755 scope.go:117] "RemoveContainer" containerID="c317437f3833ffa9c4bba705ee2105f6533d6147f55bd6b859fe9bdc40389d93" Nov 24 01:41:03 crc kubenswrapper[4755]: I1124 01:41:03.775967 4755 scope.go:117] "RemoveContainer" containerID="7d17fdd1cec8829e48d38f4960e052fcc869568b66f24d506522812892416037" Nov 24 01:41:03 crc kubenswrapper[4755]: I1124 01:41:03.831378 4755 scope.go:117] "RemoveContainer" containerID="04d269e7b1d1837f19b589cec6c6436f35986ca8e4dafe52f5060dc15a29a6fe" Nov 24 01:41:06 crc kubenswrapper[4755]: I1124 01:41:06.996645 4755 scope.go:117] "RemoveContainer" containerID="8c75477967bfd17f465845e9ad9d8e30c5874427ba92c3a82e0868aae5f07ae3" Nov 24 01:41:06 crc kubenswrapper[4755]: E1124 01:41:06.997329 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:41:18 crc kubenswrapper[4755]: I1124 01:41:18.996570 4755 scope.go:117] "RemoveContainer" containerID="8c75477967bfd17f465845e9ad9d8e30c5874427ba92c3a82e0868aae5f07ae3" Nov 24 01:41:18 crc kubenswrapper[4755]: E1124 01:41:18.997514 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:41:21 crc kubenswrapper[4755]: I1124 01:41:21.639792 4755 generic.go:334] "Generic (PLEG): container finished" podID="4afec439-5744-46c0-a074-88c86ac07fbe" containerID="e0342c906858a08109e3d139f887bd92802b8fed8f69928de14fa54eb526ef3f" exitCode=0 Nov 24 01:41:21 crc kubenswrapper[4755]: I1124 01:41:21.639885 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xk9rb" 
event={"ID":"4afec439-5744-46c0-a074-88c86ac07fbe","Type":"ContainerDied","Data":"e0342c906858a08109e3d139f887bd92802b8fed8f69928de14fa54eb526ef3f"} Nov 24 01:41:23 crc kubenswrapper[4755]: I1124 01:41:23.098278 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xk9rb" Nov 24 01:41:23 crc kubenswrapper[4755]: I1124 01:41:23.233193 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hd9dp\" (UniqueName: \"kubernetes.io/projected/4afec439-5744-46c0-a074-88c86ac07fbe-kube-api-access-hd9dp\") pod \"4afec439-5744-46c0-a074-88c86ac07fbe\" (UID: \"4afec439-5744-46c0-a074-88c86ac07fbe\") " Nov 24 01:41:23 crc kubenswrapper[4755]: I1124 01:41:23.233247 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4afec439-5744-46c0-a074-88c86ac07fbe-ssh-key\") pod \"4afec439-5744-46c0-a074-88c86ac07fbe\" (UID: \"4afec439-5744-46c0-a074-88c86ac07fbe\") " Nov 24 01:41:23 crc kubenswrapper[4755]: I1124 01:41:23.233299 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4afec439-5744-46c0-a074-88c86ac07fbe-inventory\") pod \"4afec439-5744-46c0-a074-88c86ac07fbe\" (UID: \"4afec439-5744-46c0-a074-88c86ac07fbe\") " Nov 24 01:41:23 crc kubenswrapper[4755]: I1124 01:41:23.238841 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4afec439-5744-46c0-a074-88c86ac07fbe-kube-api-access-hd9dp" (OuterVolumeSpecName: "kube-api-access-hd9dp") pod "4afec439-5744-46c0-a074-88c86ac07fbe" (UID: "4afec439-5744-46c0-a074-88c86ac07fbe"). InnerVolumeSpecName "kube-api-access-hd9dp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:41:23 crc kubenswrapper[4755]: E1124 01:41:23.263973 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4afec439-5744-46c0-a074-88c86ac07fbe-ssh-key podName:4afec439-5744-46c0-a074-88c86ac07fbe nodeName:}" failed. No retries permitted until 2025-11-24 01:41:23.763942495 +0000 UTC m=+1708.450008006 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "ssh-key" (UniqueName: "kubernetes.io/secret/4afec439-5744-46c0-a074-88c86ac07fbe-ssh-key") pod "4afec439-5744-46c0-a074-88c86ac07fbe" (UID: "4afec439-5744-46c0-a074-88c86ac07fbe") : error deleting /var/lib/kubelet/pods/4afec439-5744-46c0-a074-88c86ac07fbe/volume-subpaths: remove /var/lib/kubelet/pods/4afec439-5744-46c0-a074-88c86ac07fbe/volume-subpaths: no such file or directory Nov 24 01:41:23 crc kubenswrapper[4755]: I1124 01:41:23.266971 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4afec439-5744-46c0-a074-88c86ac07fbe-inventory" (OuterVolumeSpecName: "inventory") pod "4afec439-5744-46c0-a074-88c86ac07fbe" (UID: "4afec439-5744-46c0-a074-88c86ac07fbe"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:41:23 crc kubenswrapper[4755]: I1124 01:41:23.335829 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hd9dp\" (UniqueName: \"kubernetes.io/projected/4afec439-5744-46c0-a074-88c86ac07fbe-kube-api-access-hd9dp\") on node \"crc\" DevicePath \"\"" Nov 24 01:41:23 crc kubenswrapper[4755]: I1124 01:41:23.335870 4755 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4afec439-5744-46c0-a074-88c86ac07fbe-inventory\") on node \"crc\" DevicePath \"\"" Nov 24 01:41:23 crc kubenswrapper[4755]: I1124 01:41:23.666352 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xk9rb" event={"ID":"4afec439-5744-46c0-a074-88c86ac07fbe","Type":"ContainerDied","Data":"4a22f491949f2d6bc6f0578638bb9373e8e385d827debb5666d2f74cbec5a876"} Nov 24 01:41:23 crc kubenswrapper[4755]: I1124 01:41:23.666398 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4a22f491949f2d6bc6f0578638bb9373e8e385d827debb5666d2f74cbec5a876" Nov 24 01:41:23 crc kubenswrapper[4755]: I1124 01:41:23.666471 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xk9rb" Nov 24 01:41:23 crc kubenswrapper[4755]: I1124 01:41:23.737777 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-bvm62"] Nov 24 01:41:23 crc kubenswrapper[4755]: E1124 01:41:23.738155 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4afec439-5744-46c0-a074-88c86ac07fbe" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Nov 24 01:41:23 crc kubenswrapper[4755]: I1124 01:41:23.738168 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="4afec439-5744-46c0-a074-88c86ac07fbe" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Nov 24 01:41:23 crc kubenswrapper[4755]: I1124 01:41:23.738335 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="4afec439-5744-46c0-a074-88c86ac07fbe" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Nov 24 01:41:23 crc kubenswrapper[4755]: I1124 01:41:23.738936 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-bvm62" Nov 24 01:41:23 crc kubenswrapper[4755]: I1124 01:41:23.752189 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-bvm62"] Nov 24 01:41:23 crc kubenswrapper[4755]: I1124 01:41:23.846225 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4afec439-5744-46c0-a074-88c86ac07fbe-ssh-key\") pod \"4afec439-5744-46c0-a074-88c86ac07fbe\" (UID: \"4afec439-5744-46c0-a074-88c86ac07fbe\") " Nov 24 01:41:23 crc kubenswrapper[4755]: I1124 01:41:23.846687 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/bca53d6d-913f-408e-a979-2515d6ee4c8e-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-bvm62\" (UID: \"bca53d6d-913f-408e-a979-2515d6ee4c8e\") " pod="openstack/ssh-known-hosts-edpm-deployment-bvm62" Nov 24 01:41:23 crc kubenswrapper[4755]: I1124 01:41:23.846833 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/bca53d6d-913f-408e-a979-2515d6ee4c8e-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-bvm62\" (UID: \"bca53d6d-913f-408e-a979-2515d6ee4c8e\") " pod="openstack/ssh-known-hosts-edpm-deployment-bvm62" Nov 24 01:41:23 crc kubenswrapper[4755]: I1124 01:41:23.847124 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-79d7m\" (UniqueName: \"kubernetes.io/projected/bca53d6d-913f-408e-a979-2515d6ee4c8e-kube-api-access-79d7m\") pod \"ssh-known-hosts-edpm-deployment-bvm62\" (UID: \"bca53d6d-913f-408e-a979-2515d6ee4c8e\") " pod="openstack/ssh-known-hosts-edpm-deployment-bvm62" Nov 24 01:41:23 crc kubenswrapper[4755]: I1124 01:41:23.851170 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4afec439-5744-46c0-a074-88c86ac07fbe-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "4afec439-5744-46c0-a074-88c86ac07fbe" (UID: "4afec439-5744-46c0-a074-88c86ac07fbe"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:41:23 crc kubenswrapper[4755]: I1124 01:41:23.949266 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-79d7m\" (UniqueName: \"kubernetes.io/projected/bca53d6d-913f-408e-a979-2515d6ee4c8e-kube-api-access-79d7m\") pod \"ssh-known-hosts-edpm-deployment-bvm62\" (UID: \"bca53d6d-913f-408e-a979-2515d6ee4c8e\") " pod="openstack/ssh-known-hosts-edpm-deployment-bvm62" Nov 24 01:41:23 crc kubenswrapper[4755]: I1124 01:41:23.949878 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/bca53d6d-913f-408e-a979-2515d6ee4c8e-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-bvm62\" (UID: \"bca53d6d-913f-408e-a979-2515d6ee4c8e\") " pod="openstack/ssh-known-hosts-edpm-deployment-bvm62" Nov 24 01:41:23 crc kubenswrapper[4755]: I1124 01:41:23.950331 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/bca53d6d-913f-408e-a979-2515d6ee4c8e-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-bvm62\" (UID: \"bca53d6d-913f-408e-a979-2515d6ee4c8e\") " pod="openstack/ssh-known-hosts-edpm-deployment-bvm62" Nov 24 01:41:23 crc kubenswrapper[4755]: I1124 01:41:23.950551 4755 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4afec439-5744-46c0-a074-88c86ac07fbe-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 24 01:41:23 crc kubenswrapper[4755]: I1124 01:41:23.954668 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/bca53d6d-913f-408e-a979-2515d6ee4c8e-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-bvm62\" (UID: \"bca53d6d-913f-408e-a979-2515d6ee4c8e\") " pod="openstack/ssh-known-hosts-edpm-deployment-bvm62" Nov 24 01:41:23 crc kubenswrapper[4755]: I1124 01:41:23.955256 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/bca53d6d-913f-408e-a979-2515d6ee4c8e-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-bvm62\" (UID: \"bca53d6d-913f-408e-a979-2515d6ee4c8e\") " pod="openstack/ssh-known-hosts-edpm-deployment-bvm62" Nov 24 01:41:23 crc kubenswrapper[4755]: I1124 01:41:23.976509 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-79d7m\" (UniqueName: \"kubernetes.io/projected/bca53d6d-913f-408e-a979-2515d6ee4c8e-kube-api-access-79d7m\") pod \"ssh-known-hosts-edpm-deployment-bvm62\" (UID: \"bca53d6d-913f-408e-a979-2515d6ee4c8e\") " pod="openstack/ssh-known-hosts-edpm-deployment-bvm62" Nov 24 01:41:24 crc kubenswrapper[4755]: I1124 01:41:24.063444 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-bvm62" Nov 24 01:41:24 crc kubenswrapper[4755]: I1124 01:41:24.597140 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-bvm62"] Nov 24 01:41:24 crc kubenswrapper[4755]: I1124 01:41:24.679718 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-bvm62" event={"ID":"bca53d6d-913f-408e-a979-2515d6ee4c8e","Type":"ContainerStarted","Data":"176ce6b0fdf3ac81b50687d67f4a5aa7f541a63d907a14b07012fd2125da3c0f"} Nov 24 01:41:25 crc kubenswrapper[4755]: I1124 01:41:25.704818 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-bvm62" event={"ID":"bca53d6d-913f-408e-a979-2515d6ee4c8e","Type":"ContainerStarted","Data":"cf8af376a0df774ab6c0a3ee1d77fac48c944c3d7ee4542179804ce1979f082d"} Nov 24 01:41:25 crc kubenswrapper[4755]: I1124 01:41:25.724285 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-bvm62" podStartSLOduration=2.206821379 podStartE2EDuration="2.724266775s" podCreationTimestamp="2025-11-24 01:41:23 +0000 UTC" firstStartedPulling="2025-11-24 01:41:24.60064625 +0000 UTC m=+1709.286711751" lastFinishedPulling="2025-11-24 01:41:25.118091646 +0000 UTC m=+1709.804157147" observedRunningTime="2025-11-24 01:41:25.723681669 +0000 UTC m=+1710.409747180" watchObservedRunningTime="2025-11-24 01:41:25.724266775 +0000 UTC m=+1710.410332266" Nov 24 01:41:29 crc kubenswrapper[4755]: I1124 01:41:29.044387 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-b2bn7"] Nov 24 01:41:29 crc kubenswrapper[4755]: I1124 01:41:29.053673 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-b2bn7"] Nov 24 01:41:30 crc kubenswrapper[4755]: I1124 01:41:30.013472 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f" path="/var/lib/kubelet/pods/ee0ab240-2dbe-4d54-9b69-af5e94e2fd0f/volumes" Nov 24 01:41:30 crc kubenswrapper[4755]: I1124 01:41:30.997464 4755 scope.go:117] "RemoveContainer" containerID="8c75477967bfd17f465845e9ad9d8e30c5874427ba92c3a82e0868aae5f07ae3" Nov 24 01:41:30 crc kubenswrapper[4755]: E1124 01:41:30.998191 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:41:32 crc kubenswrapper[4755]: I1124 01:41:32.760820 4755 generic.go:334] "Generic (PLEG): container finished" podID="bca53d6d-913f-408e-a979-2515d6ee4c8e" containerID="cf8af376a0df774ab6c0a3ee1d77fac48c944c3d7ee4542179804ce1979f082d" exitCode=0 Nov 24 01:41:32 crc kubenswrapper[4755]: I1124 01:41:32.760895 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-bvm62" event={"ID":"bca53d6d-913f-408e-a979-2515d6ee4c8e","Type":"ContainerDied","Data":"cf8af376a0df774ab6c0a3ee1d77fac48c944c3d7ee4542179804ce1979f082d"} Nov 24 01:41:34 crc kubenswrapper[4755]: I1124 01:41:34.177587 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-bvm62" Nov 24 01:41:34 crc kubenswrapper[4755]: I1124 01:41:34.249005 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-79d7m\" (UniqueName: \"kubernetes.io/projected/bca53d6d-913f-408e-a979-2515d6ee4c8e-kube-api-access-79d7m\") pod \"bca53d6d-913f-408e-a979-2515d6ee4c8e\" (UID: \"bca53d6d-913f-408e-a979-2515d6ee4c8e\") " Nov 24 01:41:34 crc kubenswrapper[4755]: I1124 01:41:34.249163 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/bca53d6d-913f-408e-a979-2515d6ee4c8e-ssh-key-openstack-edpm-ipam\") pod \"bca53d6d-913f-408e-a979-2515d6ee4c8e\" (UID: \"bca53d6d-913f-408e-a979-2515d6ee4c8e\") " Nov 24 01:41:34 crc kubenswrapper[4755]: I1124 01:41:34.249202 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/bca53d6d-913f-408e-a979-2515d6ee4c8e-inventory-0\") pod \"bca53d6d-913f-408e-a979-2515d6ee4c8e\" (UID: \"bca53d6d-913f-408e-a979-2515d6ee4c8e\") " Nov 24 01:41:34 crc kubenswrapper[4755]: I1124 01:41:34.255588 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bca53d6d-913f-408e-a979-2515d6ee4c8e-kube-api-access-79d7m" (OuterVolumeSpecName: "kube-api-access-79d7m") pod "bca53d6d-913f-408e-a979-2515d6ee4c8e" (UID: "bca53d6d-913f-408e-a979-2515d6ee4c8e"). InnerVolumeSpecName "kube-api-access-79d7m". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:41:34 crc kubenswrapper[4755]: I1124 01:41:34.278247 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bca53d6d-913f-408e-a979-2515d6ee4c8e-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "bca53d6d-913f-408e-a979-2515d6ee4c8e" (UID: "bca53d6d-913f-408e-a979-2515d6ee4c8e"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:41:34 crc kubenswrapper[4755]: I1124 01:41:34.282514 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bca53d6d-913f-408e-a979-2515d6ee4c8e-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "bca53d6d-913f-408e-a979-2515d6ee4c8e" (UID: "bca53d6d-913f-408e-a979-2515d6ee4c8e"). InnerVolumeSpecName "inventory-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:41:34 crc kubenswrapper[4755]: I1124 01:41:34.351268 4755 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/bca53d6d-913f-408e-a979-2515d6ee4c8e-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Nov 24 01:41:34 crc kubenswrapper[4755]: I1124 01:41:34.351524 4755 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/bca53d6d-913f-408e-a979-2515d6ee4c8e-inventory-0\") on node \"crc\" DevicePath \"\"" Nov 24 01:41:34 crc kubenswrapper[4755]: I1124 01:41:34.351545 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-79d7m\" (UniqueName: \"kubernetes.io/projected/bca53d6d-913f-408e-a979-2515d6ee4c8e-kube-api-access-79d7m\") on node \"crc\" DevicePath \"\"" Nov 24 01:41:34 crc kubenswrapper[4755]: I1124 01:41:34.778255 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-bvm62" event={"ID":"bca53d6d-913f-408e-a979-2515d6ee4c8e","Type":"ContainerDied","Data":"176ce6b0fdf3ac81b50687d67f4a5aa7f541a63d907a14b07012fd2125da3c0f"} Nov 24 01:41:34 crc kubenswrapper[4755]: I1124 01:41:34.778300 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="176ce6b0fdf3ac81b50687d67f4a5aa7f541a63d907a14b07012fd2125da3c0f" Nov 24 01:41:34 crc kubenswrapper[4755]: I1124 01:41:34.778379 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-bvm62" Nov 24 01:41:34 crc kubenswrapper[4755]: I1124 01:41:34.865967 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-fnjgp"] Nov 24 01:41:34 crc kubenswrapper[4755]: E1124 01:41:34.866542 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bca53d6d-913f-408e-a979-2515d6ee4c8e" containerName="ssh-known-hosts-edpm-deployment" Nov 24 01:41:34 crc kubenswrapper[4755]: I1124 01:41:34.866555 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="bca53d6d-913f-408e-a979-2515d6ee4c8e" containerName="ssh-known-hosts-edpm-deployment" Nov 24 01:41:34 crc kubenswrapper[4755]: I1124 01:41:34.866939 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="bca53d6d-913f-408e-a979-2515d6ee4c8e" containerName="ssh-known-hosts-edpm-deployment" Nov 24 01:41:34 crc kubenswrapper[4755]: I1124 01:41:34.867797 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fnjgp" Nov 24 01:41:34 crc kubenswrapper[4755]: I1124 01:41:34.870503 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Nov 24 01:41:34 crc kubenswrapper[4755]: I1124 01:41:34.870683 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Nov 24 01:41:34 crc kubenswrapper[4755]: I1124 01:41:34.870752 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-4dz5v" Nov 24 01:41:34 crc kubenswrapper[4755]: I1124 01:41:34.870840 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 24 01:41:34 crc kubenswrapper[4755]: I1124 01:41:34.880320 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-fnjgp"] Nov 24 01:41:34 crc kubenswrapper[4755]: I1124 01:41:34.972202 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6f8286f1-2efd-487d-9feb-fe2eb1fa0112-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-fnjgp\" (UID: \"6f8286f1-2efd-487d-9feb-fe2eb1fa0112\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fnjgp" Nov 24 01:41:34 crc kubenswrapper[4755]: I1124 01:41:34.972844 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cb85w\" (UniqueName: \"kubernetes.io/projected/6f8286f1-2efd-487d-9feb-fe2eb1fa0112-kube-api-access-cb85w\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-fnjgp\" (UID: \"6f8286f1-2efd-487d-9feb-fe2eb1fa0112\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fnjgp" Nov 24 01:41:34 crc kubenswrapper[4755]: I1124 01:41:34.973026 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6f8286f1-2efd-487d-9feb-fe2eb1fa0112-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-fnjgp\" (UID: \"6f8286f1-2efd-487d-9feb-fe2eb1fa0112\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fnjgp" Nov 24 01:41:35 crc kubenswrapper[4755]: I1124 01:41:35.075494 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cb85w\" (UniqueName: \"kubernetes.io/projected/6f8286f1-2efd-487d-9feb-fe2eb1fa0112-kube-api-access-cb85w\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-fnjgp\" (UID: \"6f8286f1-2efd-487d-9feb-fe2eb1fa0112\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fnjgp" Nov 24 01:41:35 crc kubenswrapper[4755]: I1124 01:41:35.075687 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6f8286f1-2efd-487d-9feb-fe2eb1fa0112-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-fnjgp\" (UID: \"6f8286f1-2efd-487d-9feb-fe2eb1fa0112\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fnjgp" Nov 24 01:41:35 crc kubenswrapper[4755]: I1124 01:41:35.075750 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6f8286f1-2efd-487d-9feb-fe2eb1fa0112-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-fnjgp\" (UID: \"6f8286f1-2efd-487d-9feb-fe2eb1fa0112\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fnjgp" Nov 24 01:41:35 crc kubenswrapper[4755]: I1124 01:41:35.081889 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6f8286f1-2efd-487d-9feb-fe2eb1fa0112-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-fnjgp\" (UID: \"6f8286f1-2efd-487d-9feb-fe2eb1fa0112\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fnjgp" Nov 24 01:41:35 crc kubenswrapper[4755]: I1124 01:41:35.082294 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6f8286f1-2efd-487d-9feb-fe2eb1fa0112-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-fnjgp\" (UID: \"6f8286f1-2efd-487d-9feb-fe2eb1fa0112\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fnjgp" Nov 24 01:41:35 crc kubenswrapper[4755]: I1124 01:41:35.094723 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cb85w\" (UniqueName: \"kubernetes.io/projected/6f8286f1-2efd-487d-9feb-fe2eb1fa0112-kube-api-access-cb85w\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-fnjgp\" (UID: \"6f8286f1-2efd-487d-9feb-fe2eb1fa0112\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fnjgp" Nov 24 01:41:35 crc kubenswrapper[4755]: I1124 01:41:35.195037 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fnjgp" Nov 24 01:41:35 crc kubenswrapper[4755]: I1124 01:41:35.721791 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-fnjgp"] Nov 24 01:41:35 crc kubenswrapper[4755]: I1124 01:41:35.790169 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fnjgp" event={"ID":"6f8286f1-2efd-487d-9feb-fe2eb1fa0112","Type":"ContainerStarted","Data":"0cce8852007b17487b10870b682f29b24ba0cb876b8d0ec88890902061d089d8"} Nov 24 01:41:36 crc kubenswrapper[4755]: I1124 01:41:36.799935 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fnjgp" event={"ID":"6f8286f1-2efd-487d-9feb-fe2eb1fa0112","Type":"ContainerStarted","Data":"2d87265bfa53eb91a9d5930fe253a8a9a413fccbd960c88828c7e170464c0c0a"} Nov 24 01:41:36 crc kubenswrapper[4755]: I1124 01:41:36.823417 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fnjgp" podStartSLOduration=2.400242474 podStartE2EDuration="2.823396422s" podCreationTimestamp="2025-11-24 01:41:34 +0000 UTC" firstStartedPulling="2025-11-24 01:41:35.732337999 +0000 UTC m=+1720.418403520" lastFinishedPulling="2025-11-24 01:41:36.155491967 +0000 UTC m=+1720.841557468" observedRunningTime="2025-11-24 01:41:36.822763445 +0000 UTC m=+1721.508828946" watchObservedRunningTime="2025-11-24 01:41:36.823396422 +0000 UTC m=+1721.509461923" Nov 24 01:41:43 crc kubenswrapper[4755]: I1124 01:41:43.996882 4755 scope.go:117] "RemoveContainer" containerID="8c75477967bfd17f465845e9ad9d8e30c5874427ba92c3a82e0868aae5f07ae3" Nov 24 01:41:43 crc kubenswrapper[4755]: E1124 01:41:43.997676 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:41:44 crc kubenswrapper[4755]: I1124 01:41:44.870399 4755 generic.go:334] "Generic (PLEG): container finished" podID="6f8286f1-2efd-487d-9feb-fe2eb1fa0112" containerID="2d87265bfa53eb91a9d5930fe253a8a9a413fccbd960c88828c7e170464c0c0a" exitCode=0 Nov 24 01:41:44 crc kubenswrapper[4755]: I1124 01:41:44.870495 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fnjgp" event={"ID":"6f8286f1-2efd-487d-9feb-fe2eb1fa0112","Type":"ContainerDied","Data":"2d87265bfa53eb91a9d5930fe253a8a9a413fccbd960c88828c7e170464c0c0a"} Nov 24 01:41:46 crc kubenswrapper[4755]: I1124 01:41:46.263645 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fnjgp" Nov 24 01:41:46 crc kubenswrapper[4755]: I1124 01:41:46.388667 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6f8286f1-2efd-487d-9feb-fe2eb1fa0112-ssh-key\") pod \"6f8286f1-2efd-487d-9feb-fe2eb1fa0112\" (UID: \"6f8286f1-2efd-487d-9feb-fe2eb1fa0112\") " Nov 24 01:41:46 crc kubenswrapper[4755]: I1124 01:41:46.388805 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cb85w\" (UniqueName: \"kubernetes.io/projected/6f8286f1-2efd-487d-9feb-fe2eb1fa0112-kube-api-access-cb85w\") pod \"6f8286f1-2efd-487d-9feb-fe2eb1fa0112\" (UID: \"6f8286f1-2efd-487d-9feb-fe2eb1fa0112\") " Nov 24 01:41:46 crc kubenswrapper[4755]: I1124 01:41:46.388949 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6f8286f1-2efd-487d-9feb-fe2eb1fa0112-inventory\") pod \"6f8286f1-2efd-487d-9feb-fe2eb1fa0112\" (UID: \"6f8286f1-2efd-487d-9feb-fe2eb1fa0112\") " Nov 24 01:41:46 crc kubenswrapper[4755]: I1124 01:41:46.395588 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f8286f1-2efd-487d-9feb-fe2eb1fa0112-kube-api-access-cb85w" (OuterVolumeSpecName: "kube-api-access-cb85w") pod "6f8286f1-2efd-487d-9feb-fe2eb1fa0112" (UID: "6f8286f1-2efd-487d-9feb-fe2eb1fa0112"). InnerVolumeSpecName "kube-api-access-cb85w". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:41:46 crc kubenswrapper[4755]: I1124 01:41:46.419924 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f8286f1-2efd-487d-9feb-fe2eb1fa0112-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "6f8286f1-2efd-487d-9feb-fe2eb1fa0112" (UID: "6f8286f1-2efd-487d-9feb-fe2eb1fa0112"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:41:46 crc kubenswrapper[4755]: I1124 01:41:46.433324 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f8286f1-2efd-487d-9feb-fe2eb1fa0112-inventory" (OuterVolumeSpecName: "inventory") pod "6f8286f1-2efd-487d-9feb-fe2eb1fa0112" (UID: "6f8286f1-2efd-487d-9feb-fe2eb1fa0112"). InnerVolumeSpecName "inventory". 
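The one-shot Ansible job pods in this excerpt (install-os, configure-os, ssh-known-hosts, run-os, reboot-os) all end with a "Generic (PLEG): container finished ... exitCode=0" entry, so a quick way to verify a deployment log like this is to pull the exit code per pod out of exactly those lines. A hedged sketch, with a best-effort regular expression that only targets the formatting seen in this log:

```go
package main

import (
	"fmt"
	"regexp"
)

// finishedRe matches the "Generic (PLEG): container finished" lines above and
// captures pod ID, container ID and exit code; best-effort pattern for this log.
var finishedRe = regexp.MustCompile(`container finished" podID="([^"]+)" containerID="([^"]+)" exitCode=(\d+)`)

func main() {
	// Sample line modelled on the run-os entry above.
	lines := []string{
		`I1124 01:41:44.870399 4755 generic.go:334] "Generic (PLEG): container finished" podID="6f8286f1-2efd-487d-9feb-fe2eb1fa0112" containerID="2d87265bfa53eb91a9d5930fe253a8a9a413fccbd960c88828c7e170464c0c0a" exitCode=0`,
	}
	for _, line := range lines {
		if m := finishedRe.FindStringSubmatch(line); m != nil {
			fmt.Printf("pod %s: container %s exited %s\n", m[1], m[2], m[3])
		}
	}
}
```

Any non-zero exit code surfaced this way points at the job container to inspect first.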
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:41:46 crc kubenswrapper[4755]: I1124 01:41:46.490596 4755 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6f8286f1-2efd-487d-9feb-fe2eb1fa0112-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 24 01:41:46 crc kubenswrapper[4755]: I1124 01:41:46.490655 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cb85w\" (UniqueName: \"kubernetes.io/projected/6f8286f1-2efd-487d-9feb-fe2eb1fa0112-kube-api-access-cb85w\") on node \"crc\" DevicePath \"\"" Nov 24 01:41:46 crc kubenswrapper[4755]: I1124 01:41:46.490668 4755 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6f8286f1-2efd-487d-9feb-fe2eb1fa0112-inventory\") on node \"crc\" DevicePath \"\"" Nov 24 01:41:46 crc kubenswrapper[4755]: I1124 01:41:46.887234 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fnjgp" event={"ID":"6f8286f1-2efd-487d-9feb-fe2eb1fa0112","Type":"ContainerDied","Data":"0cce8852007b17487b10870b682f29b24ba0cb876b8d0ec88890902061d089d8"} Nov 24 01:41:46 crc kubenswrapper[4755]: I1124 01:41:46.887283 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0cce8852007b17487b10870b682f29b24ba0cb876b8d0ec88890902061d089d8" Nov 24 01:41:46 crc kubenswrapper[4755]: I1124 01:41:46.887299 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fnjgp" Nov 24 01:41:46 crc kubenswrapper[4755]: I1124 01:41:46.979584 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-r6vw9"] Nov 24 01:41:46 crc kubenswrapper[4755]: E1124 01:41:46.980028 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f8286f1-2efd-487d-9feb-fe2eb1fa0112" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Nov 24 01:41:46 crc kubenswrapper[4755]: I1124 01:41:46.980047 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f8286f1-2efd-487d-9feb-fe2eb1fa0112" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Nov 24 01:41:46 crc kubenswrapper[4755]: I1124 01:41:46.980242 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f8286f1-2efd-487d-9feb-fe2eb1fa0112" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Nov 24 01:41:46 crc kubenswrapper[4755]: I1124 01:41:46.980848 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-r6vw9" Nov 24 01:41:46 crc kubenswrapper[4755]: I1124 01:41:46.983721 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Nov 24 01:41:46 crc kubenswrapper[4755]: I1124 01:41:46.983939 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Nov 24 01:41:46 crc kubenswrapper[4755]: I1124 01:41:46.984041 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 24 01:41:46 crc kubenswrapper[4755]: I1124 01:41:46.984471 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-4dz5v" Nov 24 01:41:46 crc kubenswrapper[4755]: I1124 01:41:46.989622 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-r6vw9"] Nov 24 01:41:47 crc kubenswrapper[4755]: I1124 01:41:47.002852 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x49ff\" (UniqueName: \"kubernetes.io/projected/b2e96444-0ef2-436c-9641-b980fd1961d6-kube-api-access-x49ff\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-r6vw9\" (UID: \"b2e96444-0ef2-436c-9641-b980fd1961d6\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-r6vw9" Nov 24 01:41:47 crc kubenswrapper[4755]: I1124 01:41:47.002985 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b2e96444-0ef2-436c-9641-b980fd1961d6-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-r6vw9\" (UID: \"b2e96444-0ef2-436c-9641-b980fd1961d6\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-r6vw9" Nov 24 01:41:47 crc kubenswrapper[4755]: I1124 01:41:47.003085 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b2e96444-0ef2-436c-9641-b980fd1961d6-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-r6vw9\" (UID: \"b2e96444-0ef2-436c-9641-b980fd1961d6\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-r6vw9" Nov 24 01:41:47 crc kubenswrapper[4755]: I1124 01:41:47.104930 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b2e96444-0ef2-436c-9641-b980fd1961d6-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-r6vw9\" (UID: \"b2e96444-0ef2-436c-9641-b980fd1961d6\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-r6vw9" Nov 24 01:41:47 crc kubenswrapper[4755]: I1124 01:41:47.105061 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b2e96444-0ef2-436c-9641-b980fd1961d6-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-r6vw9\" (UID: \"b2e96444-0ef2-436c-9641-b980fd1961d6\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-r6vw9" Nov 24 01:41:47 crc kubenswrapper[4755]: I1124 01:41:47.105123 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x49ff\" (UniqueName: \"kubernetes.io/projected/b2e96444-0ef2-436c-9641-b980fd1961d6-kube-api-access-x49ff\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-r6vw9\" (UID: 
\"b2e96444-0ef2-436c-9641-b980fd1961d6\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-r6vw9" Nov 24 01:41:47 crc kubenswrapper[4755]: I1124 01:41:47.117368 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b2e96444-0ef2-436c-9641-b980fd1961d6-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-r6vw9\" (UID: \"b2e96444-0ef2-436c-9641-b980fd1961d6\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-r6vw9" Nov 24 01:41:47 crc kubenswrapper[4755]: I1124 01:41:47.122002 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b2e96444-0ef2-436c-9641-b980fd1961d6-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-r6vw9\" (UID: \"b2e96444-0ef2-436c-9641-b980fd1961d6\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-r6vw9" Nov 24 01:41:47 crc kubenswrapper[4755]: I1124 01:41:47.122926 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x49ff\" (UniqueName: \"kubernetes.io/projected/b2e96444-0ef2-436c-9641-b980fd1961d6-kube-api-access-x49ff\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-r6vw9\" (UID: \"b2e96444-0ef2-436c-9641-b980fd1961d6\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-r6vw9" Nov 24 01:41:47 crc kubenswrapper[4755]: I1124 01:41:47.303814 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-r6vw9" Nov 24 01:41:47 crc kubenswrapper[4755]: I1124 01:41:47.850886 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-r6vw9"] Nov 24 01:41:47 crc kubenswrapper[4755]: I1124 01:41:47.898900 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-r6vw9" event={"ID":"b2e96444-0ef2-436c-9641-b980fd1961d6","Type":"ContainerStarted","Data":"f22ab65b176e490b3087ed142a13115457fdd3ee2d718e8ce963a65ce23df844"} Nov 24 01:41:48 crc kubenswrapper[4755]: I1124 01:41:48.908530 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-r6vw9" event={"ID":"b2e96444-0ef2-436c-9641-b980fd1961d6","Type":"ContainerStarted","Data":"13b0ae3253d0fd892506b98e2060a8cacebdf99150085e1959a3a1b6e84dd147"} Nov 24 01:41:48 crc kubenswrapper[4755]: I1124 01:41:48.931141 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-r6vw9" podStartSLOduration=2.527286119 podStartE2EDuration="2.931121857s" podCreationTimestamp="2025-11-24 01:41:46 +0000 UTC" firstStartedPulling="2025-11-24 01:41:47.857567483 +0000 UTC m=+1732.543632984" lastFinishedPulling="2025-11-24 01:41:48.261403211 +0000 UTC m=+1732.947468722" observedRunningTime="2025-11-24 01:41:48.923793542 +0000 UTC m=+1733.609859063" watchObservedRunningTime="2025-11-24 01:41:48.931121857 +0000 UTC m=+1733.617187358" Nov 24 01:41:56 crc kubenswrapper[4755]: I1124 01:41:56.004233 4755 scope.go:117] "RemoveContainer" containerID="8c75477967bfd17f465845e9ad9d8e30c5874427ba92c3a82e0868aae5f07ae3" Nov 24 01:41:56 crc kubenswrapper[4755]: E1124 01:41:56.007302 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:41:57 crc kubenswrapper[4755]: I1124 01:41:57.996245 4755 generic.go:334] "Generic (PLEG): container finished" podID="b2e96444-0ef2-436c-9641-b980fd1961d6" containerID="13b0ae3253d0fd892506b98e2060a8cacebdf99150085e1959a3a1b6e84dd147" exitCode=0 Nov 24 01:41:58 crc kubenswrapper[4755]: I1124 01:41:58.013223 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-r6vw9" event={"ID":"b2e96444-0ef2-436c-9641-b980fd1961d6","Type":"ContainerDied","Data":"13b0ae3253d0fd892506b98e2060a8cacebdf99150085e1959a3a1b6e84dd147"} Nov 24 01:41:59 crc kubenswrapper[4755]: I1124 01:41:59.459955 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-r6vw9" Nov 24 01:41:59 crc kubenswrapper[4755]: I1124 01:41:59.558545 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b2e96444-0ef2-436c-9641-b980fd1961d6-ssh-key\") pod \"b2e96444-0ef2-436c-9641-b980fd1961d6\" (UID: \"b2e96444-0ef2-436c-9641-b980fd1961d6\") " Nov 24 01:41:59 crc kubenswrapper[4755]: I1124 01:41:59.558747 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x49ff\" (UniqueName: \"kubernetes.io/projected/b2e96444-0ef2-436c-9641-b980fd1961d6-kube-api-access-x49ff\") pod \"b2e96444-0ef2-436c-9641-b980fd1961d6\" (UID: \"b2e96444-0ef2-436c-9641-b980fd1961d6\") " Nov 24 01:41:59 crc kubenswrapper[4755]: I1124 01:41:59.558787 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b2e96444-0ef2-436c-9641-b980fd1961d6-inventory\") pod \"b2e96444-0ef2-436c-9641-b980fd1961d6\" (UID: \"b2e96444-0ef2-436c-9641-b980fd1961d6\") " Nov 24 01:41:59 crc kubenswrapper[4755]: I1124 01:41:59.564395 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b2e96444-0ef2-436c-9641-b980fd1961d6-kube-api-access-x49ff" (OuterVolumeSpecName: "kube-api-access-x49ff") pod "b2e96444-0ef2-436c-9641-b980fd1961d6" (UID: "b2e96444-0ef2-436c-9641-b980fd1961d6"). InnerVolumeSpecName "kube-api-access-x49ff". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:41:59 crc kubenswrapper[4755]: I1124 01:41:59.586408 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2e96444-0ef2-436c-9641-b980fd1961d6-inventory" (OuterVolumeSpecName: "inventory") pod "b2e96444-0ef2-436c-9641-b980fd1961d6" (UID: "b2e96444-0ef2-436c-9641-b980fd1961d6"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:41:59 crc kubenswrapper[4755]: I1124 01:41:59.586969 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2e96444-0ef2-436c-9641-b980fd1961d6-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b2e96444-0ef2-436c-9641-b980fd1961d6" (UID: "b2e96444-0ef2-436c-9641-b980fd1961d6"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:41:59 crc kubenswrapper[4755]: I1124 01:41:59.660752 4755 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b2e96444-0ef2-436c-9641-b980fd1961d6-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 24 01:41:59 crc kubenswrapper[4755]: I1124 01:41:59.660788 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x49ff\" (UniqueName: \"kubernetes.io/projected/b2e96444-0ef2-436c-9641-b980fd1961d6-kube-api-access-x49ff\") on node \"crc\" DevicePath \"\"" Nov 24 01:41:59 crc kubenswrapper[4755]: I1124 01:41:59.660804 4755 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b2e96444-0ef2-436c-9641-b980fd1961d6-inventory\") on node \"crc\" DevicePath \"\"" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.015194 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-r6vw9" event={"ID":"b2e96444-0ef2-436c-9641-b980fd1961d6","Type":"ContainerDied","Data":"f22ab65b176e490b3087ed142a13115457fdd3ee2d718e8ce963a65ce23df844"} Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.015230 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f22ab65b176e490b3087ed142a13115457fdd3ee2d718e8ce963a65ce23df844" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.015520 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-r6vw9" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.129261 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv"] Nov 24 01:42:00 crc kubenswrapper[4755]: E1124 01:42:00.130021 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2e96444-0ef2-436c-9641-b980fd1961d6" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.130045 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2e96444-0ef2-436c-9641-b980fd1961d6" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.130296 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2e96444-0ef2-436c-9641-b980fd1961d6" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.131114 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.134464 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.134699 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.134471 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.134783 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.134881 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.134967 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.145289 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-4dz5v" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.145634 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.148695 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv"] Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.271672 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-km2xv\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.271734 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-km2xv\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.271838 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-km2xv\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.271917 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-telemetry-combined-ca-bundle\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-km2xv\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.271942 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-km2xv\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.271993 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-km2xv\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.272031 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-km2xv\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.272060 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2szg5\" (UniqueName: \"kubernetes.io/projected/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-kube-api-access-2szg5\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-km2xv\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.272084 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-km2xv\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.272232 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-km2xv\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.272393 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-km2xv\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.272705 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-km2xv\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.272761 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-km2xv\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.272837 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-km2xv\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.374236 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-km2xv\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.374375 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-km2xv\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.374414 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-km2xv\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.374497 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-km2xv\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.374549 4755 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-km2xv\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.374591 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2szg5\" (UniqueName: \"kubernetes.io/projected/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-kube-api-access-2szg5\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-km2xv\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.374652 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-km2xv\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.374691 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-km2xv\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.374726 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-km2xv\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.374813 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-km2xv\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.374849 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-km2xv\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.374882 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-km2xv\" 
(UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.374951 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-km2xv\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.374992 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-km2xv\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.378996 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-km2xv\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.379073 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-km2xv\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.379538 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-km2xv\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.382698 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-km2xv\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.383293 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-km2xv\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.384288 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-km2xv\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.385098 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-km2xv\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.386132 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-km2xv\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.386245 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-km2xv\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.386512 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-km2xv\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.391641 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-km2xv\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.392163 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-km2xv\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.392414 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-km2xv\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" Nov 24 01:42:00 
crc kubenswrapper[4755]: I1124 01:42:00.399184 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2szg5\" (UniqueName: \"kubernetes.io/projected/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-kube-api-access-2szg5\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-km2xv\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.448044 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" Nov 24 01:42:00 crc kubenswrapper[4755]: I1124 01:42:00.985050 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv"] Nov 24 01:42:01 crc kubenswrapper[4755]: I1124 01:42:01.025490 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" event={"ID":"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9","Type":"ContainerStarted","Data":"a39841ab432ce946f8e25c54d26646606b0bcc15c057722e6e3b69a9027fe6f0"} Nov 24 01:42:02 crc kubenswrapper[4755]: I1124 01:42:02.035485 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" event={"ID":"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9","Type":"ContainerStarted","Data":"40d3498f0606fba66a6b18e8327f8b64edf2fdaa0a5ad2bcc97d546ad2807625"} Nov 24 01:42:02 crc kubenswrapper[4755]: I1124 01:42:02.064410 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" podStartSLOduration=1.67863431 podStartE2EDuration="2.064388882s" podCreationTimestamp="2025-11-24 01:42:00 +0000 UTC" firstStartedPulling="2025-11-24 01:42:00.998576304 +0000 UTC m=+1745.684641805" lastFinishedPulling="2025-11-24 01:42:01.384330866 +0000 UTC m=+1746.070396377" observedRunningTime="2025-11-24 01:42:02.057708295 +0000 UTC m=+1746.743773806" watchObservedRunningTime="2025-11-24 01:42:02.064388882 +0000 UTC m=+1746.750454383" Nov 24 01:42:03 crc kubenswrapper[4755]: I1124 01:42:03.927841 4755 scope.go:117] "RemoveContainer" containerID="45c9c9f2107ede10071edaad7cccd1068d8f7d0c8c7642ec9036461f0bab4da8" Nov 24 01:42:07 crc kubenswrapper[4755]: I1124 01:42:07.998286 4755 scope.go:117] "RemoveContainer" containerID="8c75477967bfd17f465845e9ad9d8e30c5874427ba92c3a82e0868aae5f07ae3" Nov 24 01:42:08 crc kubenswrapper[4755]: E1124 01:42:07.999273 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:42:22 crc kubenswrapper[4755]: I1124 01:42:22.997052 4755 scope.go:117] "RemoveContainer" containerID="8c75477967bfd17f465845e9ad9d8e30c5874427ba92c3a82e0868aae5f07ae3" Nov 24 01:42:22 crc kubenswrapper[4755]: E1124 01:42:22.998109 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:42:33 crc kubenswrapper[4755]: I1124 01:42:33.996593 4755 scope.go:117] "RemoveContainer" containerID="8c75477967bfd17f465845e9ad9d8e30c5874427ba92c3a82e0868aae5f07ae3" Nov 24 01:42:33 crc kubenswrapper[4755]: E1124 01:42:33.997515 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:42:42 crc kubenswrapper[4755]: I1124 01:42:42.827837 4755 generic.go:334] "Generic (PLEG): container finished" podID="dd7e309d-d807-4897-b8e8-cff4ed2c5ac9" containerID="40d3498f0606fba66a6b18e8327f8b64edf2fdaa0a5ad2bcc97d546ad2807625" exitCode=0 Nov 24 01:42:42 crc kubenswrapper[4755]: I1124 01:42:42.827954 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" event={"ID":"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9","Type":"ContainerDied","Data":"40d3498f0606fba66a6b18e8327f8b64edf2fdaa0a5ad2bcc97d546ad2807625"} Nov 24 01:42:44 crc kubenswrapper[4755]: I1124 01:42:44.318746 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" Nov 24 01:42:44 crc kubenswrapper[4755]: I1124 01:42:44.473379 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-openstack-edpm-ipam-ovn-default-certs-0\") pod \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " Nov 24 01:42:44 crc kubenswrapper[4755]: I1124 01:42:44.474078 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-libvirt-combined-ca-bundle\") pod \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " Nov 24 01:42:44 crc kubenswrapper[4755]: I1124 01:42:44.474326 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-ovn-combined-ca-bundle\") pod \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " Nov 24 01:42:44 crc kubenswrapper[4755]: I1124 01:42:44.474502 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " Nov 24 01:42:44 crc kubenswrapper[4755]: I1124 01:42:44.474723 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: 
\"kubernetes.io/projected/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " Nov 24 01:42:44 crc kubenswrapper[4755]: I1124 01:42:44.474890 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " Nov 24 01:42:44 crc kubenswrapper[4755]: I1124 01:42:44.475054 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-telemetry-combined-ca-bundle\") pod \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " Nov 24 01:42:44 crc kubenswrapper[4755]: I1124 01:42:44.475251 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2szg5\" (UniqueName: \"kubernetes.io/projected/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-kube-api-access-2szg5\") pod \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " Nov 24 01:42:44 crc kubenswrapper[4755]: I1124 01:42:44.475404 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-inventory\") pod \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " Nov 24 01:42:44 crc kubenswrapper[4755]: I1124 01:42:44.475594 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-nova-combined-ca-bundle\") pod \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " Nov 24 01:42:44 crc kubenswrapper[4755]: I1124 01:42:44.475787 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-bootstrap-combined-ca-bundle\") pod \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " Nov 24 01:42:44 crc kubenswrapper[4755]: I1124 01:42:44.475929 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-neutron-metadata-combined-ca-bundle\") pod \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " Nov 24 01:42:44 crc kubenswrapper[4755]: I1124 01:42:44.476102 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-repo-setup-combined-ca-bundle\") pod \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\" (UID: \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " Nov 24 01:42:44 crc kubenswrapper[4755]: I1124 01:42:44.476261 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-ssh-key\") pod \"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\" (UID: 
\"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9\") " Nov 24 01:42:44 crc kubenswrapper[4755]: I1124 01:42:44.479945 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "dd7e309d-d807-4897-b8e8-cff4ed2c5ac9" (UID: "dd7e309d-d807-4897-b8e8-cff4ed2c5ac9"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:42:44 crc kubenswrapper[4755]: I1124 01:42:44.480798 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "dd7e309d-d807-4897-b8e8-cff4ed2c5ac9" (UID: "dd7e309d-d807-4897-b8e8-cff4ed2c5ac9"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:42:44 crc kubenswrapper[4755]: I1124 01:42:44.480920 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "dd7e309d-d807-4897-b8e8-cff4ed2c5ac9" (UID: "dd7e309d-d807-4897-b8e8-cff4ed2c5ac9"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:42:44 crc kubenswrapper[4755]: I1124 01:42:44.481553 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "dd7e309d-d807-4897-b8e8-cff4ed2c5ac9" (UID: "dd7e309d-d807-4897-b8e8-cff4ed2c5ac9"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:42:44 crc kubenswrapper[4755]: I1124 01:42:44.481704 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "dd7e309d-d807-4897-b8e8-cff4ed2c5ac9" (UID: "dd7e309d-d807-4897-b8e8-cff4ed2c5ac9"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:42:44 crc kubenswrapper[4755]: I1124 01:42:44.482799 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-kube-api-access-2szg5" (OuterVolumeSpecName: "kube-api-access-2szg5") pod "dd7e309d-d807-4897-b8e8-cff4ed2c5ac9" (UID: "dd7e309d-d807-4897-b8e8-cff4ed2c5ac9"). InnerVolumeSpecName "kube-api-access-2szg5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:42:44 crc kubenswrapper[4755]: I1124 01:42:44.482933 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "dd7e309d-d807-4897-b8e8-cff4ed2c5ac9" (UID: "dd7e309d-d807-4897-b8e8-cff4ed2c5ac9"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:42:44 crc kubenswrapper[4755]: I1124 01:42:44.483140 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "dd7e309d-d807-4897-b8e8-cff4ed2c5ac9" (UID: "dd7e309d-d807-4897-b8e8-cff4ed2c5ac9"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:42:44 crc kubenswrapper[4755]: I1124 01:42:44.483304 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "dd7e309d-d807-4897-b8e8-cff4ed2c5ac9" (UID: "dd7e309d-d807-4897-b8e8-cff4ed2c5ac9"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:42:44 crc kubenswrapper[4755]: I1124 01:42:44.484342 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "dd7e309d-d807-4897-b8e8-cff4ed2c5ac9" (UID: "dd7e309d-d807-4897-b8e8-cff4ed2c5ac9"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:42:44 crc kubenswrapper[4755]: I1124 01:42:44.484823 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "dd7e309d-d807-4897-b8e8-cff4ed2c5ac9" (UID: "dd7e309d-d807-4897-b8e8-cff4ed2c5ac9"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:42:44 crc kubenswrapper[4755]: I1124 01:42:44.486383 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "dd7e309d-d807-4897-b8e8-cff4ed2c5ac9" (UID: "dd7e309d-d807-4897-b8e8-cff4ed2c5ac9"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:42:44 crc kubenswrapper[4755]: I1124 01:42:44.523253 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "dd7e309d-d807-4897-b8e8-cff4ed2c5ac9" (UID: "dd7e309d-d807-4897-b8e8-cff4ed2c5ac9"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:42:44 crc kubenswrapper[4755]: I1124 01:42:44.526019 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-inventory" (OuterVolumeSpecName: "inventory") pod "dd7e309d-d807-4897-b8e8-cff4ed2c5ac9" (UID: "dd7e309d-d807-4897-b8e8-cff4ed2c5ac9"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:42:44 crc kubenswrapper[4755]: I1124 01:42:44.581408 4755 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Nov 24 01:42:44 crc kubenswrapper[4755]: I1124 01:42:44.581443 4755 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:42:44 crc kubenswrapper[4755]: I1124 01:42:44.581456 4755 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:42:44 crc kubenswrapper[4755]: I1124 01:42:44.581467 4755 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Nov 24 01:42:44 crc kubenswrapper[4755]: I1124 01:42:44.581481 4755 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Nov 24 01:42:44 crc kubenswrapper[4755]: I1124 01:42:44.581495 4755 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Nov 24 01:42:44 crc kubenswrapper[4755]: I1124 01:42:44.581507 4755 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:42:44 crc kubenswrapper[4755]: I1124 01:42:44.581517 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2szg5\" (UniqueName: \"kubernetes.io/projected/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-kube-api-access-2szg5\") on node \"crc\" DevicePath \"\"" Nov 24 01:42:44 crc kubenswrapper[4755]: I1124 01:42:44.581530 4755 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-inventory\") on node \"crc\" DevicePath \"\"" Nov 24 01:42:44 crc kubenswrapper[4755]: I1124 01:42:44.581540 4755 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:42:44 crc kubenswrapper[4755]: I1124 01:42:44.581551 4755 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:42:44 crc kubenswrapper[4755]: I1124 01:42:44.581562 4755 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:42:44 crc kubenswrapper[4755]: I1124 01:42:44.581573 4755 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:42:44 crc kubenswrapper[4755]: I1124 01:42:44.581584 4755 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/dd7e309d-d807-4897-b8e8-cff4ed2c5ac9-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 24 01:42:44 crc kubenswrapper[4755]: I1124 01:42:44.853236 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" event={"ID":"dd7e309d-d807-4897-b8e8-cff4ed2c5ac9","Type":"ContainerDied","Data":"a39841ab432ce946f8e25c54d26646606b0bcc15c057722e6e3b69a9027fe6f0"} Nov 24 01:42:44 crc kubenswrapper[4755]: I1124 01:42:44.853596 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a39841ab432ce946f8e25c54d26646606b0bcc15c057722e6e3b69a9027fe6f0" Nov 24 01:42:44 crc kubenswrapper[4755]: I1124 01:42:44.853461 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-km2xv" Nov 24 01:42:45 crc kubenswrapper[4755]: I1124 01:42:45.040418 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-bjt8c"] Nov 24 01:42:45 crc kubenswrapper[4755]: E1124 01:42:45.040921 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd7e309d-d807-4897-b8e8-cff4ed2c5ac9" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Nov 24 01:42:45 crc kubenswrapper[4755]: I1124 01:42:45.040942 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd7e309d-d807-4897-b8e8-cff4ed2c5ac9" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Nov 24 01:42:45 crc kubenswrapper[4755]: I1124 01:42:45.041129 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd7e309d-d807-4897-b8e8-cff4ed2c5ac9" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Nov 24 01:42:45 crc kubenswrapper[4755]: I1124 01:42:45.041855 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bjt8c" Nov 24 01:42:45 crc kubenswrapper[4755]: I1124 01:42:45.044369 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Nov 24 01:42:45 crc kubenswrapper[4755]: I1124 01:42:45.044431 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Nov 24 01:42:45 crc kubenswrapper[4755]: I1124 01:42:45.045500 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Nov 24 01:42:45 crc kubenswrapper[4755]: I1124 01:42:45.045786 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-4dz5v" Nov 24 01:42:45 crc kubenswrapper[4755]: I1124 01:42:45.049192 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 24 01:42:45 crc kubenswrapper[4755]: I1124 01:42:45.051464 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-bjt8c"] Nov 24 01:42:45 crc kubenswrapper[4755]: I1124 01:42:45.194748 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0227ae83-e6f1-477d-b3b0-12cfcd8ae318-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bjt8c\" (UID: \"0227ae83-e6f1-477d-b3b0-12cfcd8ae318\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bjt8c" Nov 24 01:42:45 crc kubenswrapper[4755]: I1124 01:42:45.194939 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9hpl\" (UniqueName: \"kubernetes.io/projected/0227ae83-e6f1-477d-b3b0-12cfcd8ae318-kube-api-access-t9hpl\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bjt8c\" (UID: \"0227ae83-e6f1-477d-b3b0-12cfcd8ae318\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bjt8c" Nov 24 01:42:45 crc kubenswrapper[4755]: I1124 01:42:45.195141 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0227ae83-e6f1-477d-b3b0-12cfcd8ae318-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bjt8c\" (UID: \"0227ae83-e6f1-477d-b3b0-12cfcd8ae318\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bjt8c" Nov 24 01:42:45 crc kubenswrapper[4755]: I1124 01:42:45.195264 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0227ae83-e6f1-477d-b3b0-12cfcd8ae318-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bjt8c\" (UID: \"0227ae83-e6f1-477d-b3b0-12cfcd8ae318\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bjt8c" Nov 24 01:42:45 crc kubenswrapper[4755]: I1124 01:42:45.195448 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/0227ae83-e6f1-477d-b3b0-12cfcd8ae318-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bjt8c\" (UID: \"0227ae83-e6f1-477d-b3b0-12cfcd8ae318\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bjt8c" Nov 24 01:42:45 crc kubenswrapper[4755]: I1124 01:42:45.297532 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/0227ae83-e6f1-477d-b3b0-12cfcd8ae318-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bjt8c\" (UID: \"0227ae83-e6f1-477d-b3b0-12cfcd8ae318\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bjt8c" Nov 24 01:42:45 crc kubenswrapper[4755]: I1124 01:42:45.297624 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9hpl\" (UniqueName: \"kubernetes.io/projected/0227ae83-e6f1-477d-b3b0-12cfcd8ae318-kube-api-access-t9hpl\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bjt8c\" (UID: \"0227ae83-e6f1-477d-b3b0-12cfcd8ae318\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bjt8c" Nov 24 01:42:45 crc kubenswrapper[4755]: I1124 01:42:45.297659 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0227ae83-e6f1-477d-b3b0-12cfcd8ae318-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bjt8c\" (UID: \"0227ae83-e6f1-477d-b3b0-12cfcd8ae318\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bjt8c" Nov 24 01:42:45 crc kubenswrapper[4755]: I1124 01:42:45.297691 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0227ae83-e6f1-477d-b3b0-12cfcd8ae318-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bjt8c\" (UID: \"0227ae83-e6f1-477d-b3b0-12cfcd8ae318\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bjt8c" Nov 24 01:42:45 crc kubenswrapper[4755]: I1124 01:42:45.297790 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/0227ae83-e6f1-477d-b3b0-12cfcd8ae318-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bjt8c\" (UID: \"0227ae83-e6f1-477d-b3b0-12cfcd8ae318\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bjt8c" Nov 24 01:42:45 crc kubenswrapper[4755]: I1124 01:42:45.298740 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/0227ae83-e6f1-477d-b3b0-12cfcd8ae318-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bjt8c\" (UID: \"0227ae83-e6f1-477d-b3b0-12cfcd8ae318\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bjt8c" Nov 24 01:42:45 crc kubenswrapper[4755]: I1124 01:42:45.302569 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0227ae83-e6f1-477d-b3b0-12cfcd8ae318-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bjt8c\" (UID: \"0227ae83-e6f1-477d-b3b0-12cfcd8ae318\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bjt8c" Nov 24 01:42:45 crc kubenswrapper[4755]: I1124 01:42:45.302851 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0227ae83-e6f1-477d-b3b0-12cfcd8ae318-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bjt8c\" (UID: \"0227ae83-e6f1-477d-b3b0-12cfcd8ae318\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bjt8c" Nov 24 01:42:45 crc kubenswrapper[4755]: I1124 01:42:45.305529 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0227ae83-e6f1-477d-b3b0-12cfcd8ae318-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bjt8c\" (UID: \"0227ae83-e6f1-477d-b3b0-12cfcd8ae318\") " 
pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bjt8c" Nov 24 01:42:45 crc kubenswrapper[4755]: I1124 01:42:45.314452 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t9hpl\" (UniqueName: \"kubernetes.io/projected/0227ae83-e6f1-477d-b3b0-12cfcd8ae318-kube-api-access-t9hpl\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bjt8c\" (UID: \"0227ae83-e6f1-477d-b3b0-12cfcd8ae318\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bjt8c" Nov 24 01:42:45 crc kubenswrapper[4755]: I1124 01:42:45.373881 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bjt8c" Nov 24 01:42:45 crc kubenswrapper[4755]: I1124 01:42:45.892156 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-bjt8c"] Nov 24 01:42:46 crc kubenswrapper[4755]: I1124 01:42:46.006364 4755 scope.go:117] "RemoveContainer" containerID="8c75477967bfd17f465845e9ad9d8e30c5874427ba92c3a82e0868aae5f07ae3" Nov 24 01:42:46 crc kubenswrapper[4755]: E1124 01:42:46.006736 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:42:46 crc kubenswrapper[4755]: I1124 01:42:46.877097 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bjt8c" event={"ID":"0227ae83-e6f1-477d-b3b0-12cfcd8ae318","Type":"ContainerStarted","Data":"926427907dccf16d27079747bac5e29c6295c2c9dc049acee171f7478183782d"} Nov 24 01:42:46 crc kubenswrapper[4755]: I1124 01:42:46.877431 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bjt8c" event={"ID":"0227ae83-e6f1-477d-b3b0-12cfcd8ae318","Type":"ContainerStarted","Data":"d8112d67e0239a3fe2175aa71a73d2e613f577b650fea0ac67edc278d0acf76f"} Nov 24 01:42:46 crc kubenswrapper[4755]: I1124 01:42:46.901279 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bjt8c" podStartSLOduration=1.3783325419999999 podStartE2EDuration="1.901259221s" podCreationTimestamp="2025-11-24 01:42:45 +0000 UTC" firstStartedPulling="2025-11-24 01:42:45.893292188 +0000 UTC m=+1790.579357689" lastFinishedPulling="2025-11-24 01:42:46.416218867 +0000 UTC m=+1791.102284368" observedRunningTime="2025-11-24 01:42:46.896333742 +0000 UTC m=+1791.582399313" watchObservedRunningTime="2025-11-24 01:42:46.901259221 +0000 UTC m=+1791.587324722" Nov 24 01:42:58 crc kubenswrapper[4755]: I1124 01:42:58.996471 4755 scope.go:117] "RemoveContainer" containerID="8c75477967bfd17f465845e9ad9d8e30c5874427ba92c3a82e0868aae5f07ae3" Nov 24 01:42:58 crc kubenswrapper[4755]: E1124 01:42:58.997401 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:43:11 crc 
kubenswrapper[4755]: I1124 01:43:11.996953 4755 scope.go:117] "RemoveContainer" containerID="8c75477967bfd17f465845e9ad9d8e30c5874427ba92c3a82e0868aae5f07ae3" Nov 24 01:43:11 crc kubenswrapper[4755]: E1124 01:43:11.997747 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:43:22 crc kubenswrapper[4755]: I1124 01:43:22.996593 4755 scope.go:117] "RemoveContainer" containerID="8c75477967bfd17f465845e9ad9d8e30c5874427ba92c3a82e0868aae5f07ae3" Nov 24 01:43:22 crc kubenswrapper[4755]: E1124 01:43:22.997421 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:43:33 crc kubenswrapper[4755]: I1124 01:43:33.997224 4755 scope.go:117] "RemoveContainer" containerID="8c75477967bfd17f465845e9ad9d8e30c5874427ba92c3a82e0868aae5f07ae3" Nov 24 01:43:34 crc kubenswrapper[4755]: I1124 01:43:34.320711 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" event={"ID":"b1962128-02a0-46c3-82c2-5055c2aed0b9","Type":"ContainerStarted","Data":"8fe5f5cacaadeb1d6112d56d1f9d970720cdea615fa510a42f2d8a67230c0f60"} Nov 24 01:43:53 crc kubenswrapper[4755]: I1124 01:43:53.534510 4755 generic.go:334] "Generic (PLEG): container finished" podID="0227ae83-e6f1-477d-b3b0-12cfcd8ae318" containerID="926427907dccf16d27079747bac5e29c6295c2c9dc049acee171f7478183782d" exitCode=0 Nov 24 01:43:53 crc kubenswrapper[4755]: I1124 01:43:53.534590 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bjt8c" event={"ID":"0227ae83-e6f1-477d-b3b0-12cfcd8ae318","Type":"ContainerDied","Data":"926427907dccf16d27079747bac5e29c6295c2c9dc049acee171f7478183782d"} Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.017149 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bjt8c" Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.036708 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/0227ae83-e6f1-477d-b3b0-12cfcd8ae318-ovncontroller-config-0\") pod \"0227ae83-e6f1-477d-b3b0-12cfcd8ae318\" (UID: \"0227ae83-e6f1-477d-b3b0-12cfcd8ae318\") " Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.036779 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0227ae83-e6f1-477d-b3b0-12cfcd8ae318-ovn-combined-ca-bundle\") pod \"0227ae83-e6f1-477d-b3b0-12cfcd8ae318\" (UID: \"0227ae83-e6f1-477d-b3b0-12cfcd8ae318\") " Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.036821 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0227ae83-e6f1-477d-b3b0-12cfcd8ae318-ssh-key\") pod \"0227ae83-e6f1-477d-b3b0-12cfcd8ae318\" (UID: \"0227ae83-e6f1-477d-b3b0-12cfcd8ae318\") " Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.036860 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t9hpl\" (UniqueName: \"kubernetes.io/projected/0227ae83-e6f1-477d-b3b0-12cfcd8ae318-kube-api-access-t9hpl\") pod \"0227ae83-e6f1-477d-b3b0-12cfcd8ae318\" (UID: \"0227ae83-e6f1-477d-b3b0-12cfcd8ae318\") " Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.036883 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0227ae83-e6f1-477d-b3b0-12cfcd8ae318-inventory\") pod \"0227ae83-e6f1-477d-b3b0-12cfcd8ae318\" (UID: \"0227ae83-e6f1-477d-b3b0-12cfcd8ae318\") " Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.042832 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0227ae83-e6f1-477d-b3b0-12cfcd8ae318-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "0227ae83-e6f1-477d-b3b0-12cfcd8ae318" (UID: "0227ae83-e6f1-477d-b3b0-12cfcd8ae318"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.042955 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0227ae83-e6f1-477d-b3b0-12cfcd8ae318-kube-api-access-t9hpl" (OuterVolumeSpecName: "kube-api-access-t9hpl") pod "0227ae83-e6f1-477d-b3b0-12cfcd8ae318" (UID: "0227ae83-e6f1-477d-b3b0-12cfcd8ae318"). InnerVolumeSpecName "kube-api-access-t9hpl". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.072442 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0227ae83-e6f1-477d-b3b0-12cfcd8ae318-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "0227ae83-e6f1-477d-b3b0-12cfcd8ae318" (UID: "0227ae83-e6f1-477d-b3b0-12cfcd8ae318"). InnerVolumeSpecName "ovncontroller-config-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.075915 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0227ae83-e6f1-477d-b3b0-12cfcd8ae318-inventory" (OuterVolumeSpecName: "inventory") pod "0227ae83-e6f1-477d-b3b0-12cfcd8ae318" (UID: "0227ae83-e6f1-477d-b3b0-12cfcd8ae318"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.089438 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0227ae83-e6f1-477d-b3b0-12cfcd8ae318-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "0227ae83-e6f1-477d-b3b0-12cfcd8ae318" (UID: "0227ae83-e6f1-477d-b3b0-12cfcd8ae318"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.138984 4755 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/0227ae83-e6f1-477d-b3b0-12cfcd8ae318-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.139014 4755 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0227ae83-e6f1-477d-b3b0-12cfcd8ae318-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.139023 4755 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0227ae83-e6f1-477d-b3b0-12cfcd8ae318-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.139031 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t9hpl\" (UniqueName: \"kubernetes.io/projected/0227ae83-e6f1-477d-b3b0-12cfcd8ae318-kube-api-access-t9hpl\") on node \"crc\" DevicePath \"\"" Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.139039 4755 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0227ae83-e6f1-477d-b3b0-12cfcd8ae318-inventory\") on node \"crc\" DevicePath \"\"" Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.558414 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bjt8c" event={"ID":"0227ae83-e6f1-477d-b3b0-12cfcd8ae318","Type":"ContainerDied","Data":"d8112d67e0239a3fe2175aa71a73d2e613f577b650fea0ac67edc278d0acf76f"} Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.558728 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d8112d67e0239a3fe2175aa71a73d2e613f577b650fea0ac67edc278d0acf76f" Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.558484 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bjt8c" Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.650974 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn"] Nov 24 01:43:55 crc kubenswrapper[4755]: E1124 01:43:55.651342 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0227ae83-e6f1-477d-b3b0-12cfcd8ae318" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.651356 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="0227ae83-e6f1-477d-b3b0-12cfcd8ae318" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.651571 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="0227ae83-e6f1-477d-b3b0-12cfcd8ae318" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.652423 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn" Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.654184 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.654898 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.655018 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.655197 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.657468 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-4dz5v" Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.657660 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.667935 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn"] Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.850411 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fp24p\" (UniqueName: \"kubernetes.io/projected/0c0128e5-6f6e-4d49-813c-36d2959a8e3e-kube-api-access-fp24p\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn\" (UID: \"0c0128e5-6f6e-4d49-813c-36d2959a8e3e\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn" Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.850500 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0c0128e5-6f6e-4d49-813c-36d2959a8e3e-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn\" (UID: \"0c0128e5-6f6e-4d49-813c-36d2959a8e3e\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn" Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.850525 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/0c0128e5-6f6e-4d49-813c-36d2959a8e3e-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn\" (UID: \"0c0128e5-6f6e-4d49-813c-36d2959a8e3e\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn" Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.850687 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/0c0128e5-6f6e-4d49-813c-36d2959a8e3e-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn\" (UID: \"0c0128e5-6f6e-4d49-813c-36d2959a8e3e\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn" Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.850725 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c0128e5-6f6e-4d49-813c-36d2959a8e3e-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn\" (UID: \"0c0128e5-6f6e-4d49-813c-36d2959a8e3e\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn" Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.850839 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0c0128e5-6f6e-4d49-813c-36d2959a8e3e-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn\" (UID: \"0c0128e5-6f6e-4d49-813c-36d2959a8e3e\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn" Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.952089 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/0c0128e5-6f6e-4d49-813c-36d2959a8e3e-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn\" (UID: \"0c0128e5-6f6e-4d49-813c-36d2959a8e3e\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn" Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.952139 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c0128e5-6f6e-4d49-813c-36d2959a8e3e-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn\" (UID: \"0c0128e5-6f6e-4d49-813c-36d2959a8e3e\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn" Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.952186 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0c0128e5-6f6e-4d49-813c-36d2959a8e3e-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn\" (UID: \"0c0128e5-6f6e-4d49-813c-36d2959a8e3e\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn" Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.952210 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fp24p\" (UniqueName: \"kubernetes.io/projected/0c0128e5-6f6e-4d49-813c-36d2959a8e3e-kube-api-access-fp24p\") pod 
\"neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn\" (UID: \"0c0128e5-6f6e-4d49-813c-36d2959a8e3e\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn" Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.952257 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0c0128e5-6f6e-4d49-813c-36d2959a8e3e-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn\" (UID: \"0c0128e5-6f6e-4d49-813c-36d2959a8e3e\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn" Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.952286 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/0c0128e5-6f6e-4d49-813c-36d2959a8e3e-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn\" (UID: \"0c0128e5-6f6e-4d49-813c-36d2959a8e3e\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn" Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.954858 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.955459 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.956832 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.957024 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.958010 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c0128e5-6f6e-4d49-813c-36d2959a8e3e-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn\" (UID: \"0c0128e5-6f6e-4d49-813c-36d2959a8e3e\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn" Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.966262 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0c0128e5-6f6e-4d49-813c-36d2959a8e3e-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn\" (UID: \"0c0128e5-6f6e-4d49-813c-36d2959a8e3e\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn" Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.967814 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0c0128e5-6f6e-4d49-813c-36d2959a8e3e-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn\" (UID: \"0c0128e5-6f6e-4d49-813c-36d2959a8e3e\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn" Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.969163 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/0c0128e5-6f6e-4d49-813c-36d2959a8e3e-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn\" (UID: 
\"0c0128e5-6f6e-4d49-813c-36d2959a8e3e\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn" Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.970195 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/0c0128e5-6f6e-4d49-813c-36d2959a8e3e-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn\" (UID: \"0c0128e5-6f6e-4d49-813c-36d2959a8e3e\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn" Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.972085 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fp24p\" (UniqueName: \"kubernetes.io/projected/0c0128e5-6f6e-4d49-813c-36d2959a8e3e-kube-api-access-fp24p\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn\" (UID: \"0c0128e5-6f6e-4d49-813c-36d2959a8e3e\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn" Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.978802 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-4dz5v" Nov 24 01:43:55 crc kubenswrapper[4755]: I1124 01:43:55.986637 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn" Nov 24 01:43:56 crc kubenswrapper[4755]: I1124 01:43:56.583777 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn"] Nov 24 01:43:56 crc kubenswrapper[4755]: I1124 01:43:56.593903 4755 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 24 01:43:57 crc kubenswrapper[4755]: I1124 01:43:57.144007 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 24 01:43:57 crc kubenswrapper[4755]: I1124 01:43:57.600066 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn" event={"ID":"0c0128e5-6f6e-4d49-813c-36d2959a8e3e","Type":"ContainerStarted","Data":"0cf50985cb28d8b7a76e4db6c0997d2f17ef72a091edbbd287e949ec16a18500"} Nov 24 01:43:57 crc kubenswrapper[4755]: I1124 01:43:57.600404 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn" event={"ID":"0c0128e5-6f6e-4d49-813c-36d2959a8e3e","Type":"ContainerStarted","Data":"1551368e6c47ae06263fe53cba9f1bcfb056e70ab2856d88b8df1f079b14fe11"} Nov 24 01:44:48 crc kubenswrapper[4755]: I1124 01:44:48.084512 4755 generic.go:334] "Generic (PLEG): container finished" podID="0c0128e5-6f6e-4d49-813c-36d2959a8e3e" containerID="0cf50985cb28d8b7a76e4db6c0997d2f17ef72a091edbbd287e949ec16a18500" exitCode=0 Nov 24 01:44:48 crc kubenswrapper[4755]: I1124 01:44:48.084634 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn" event={"ID":"0c0128e5-6f6e-4d49-813c-36d2959a8e3e","Type":"ContainerDied","Data":"0cf50985cb28d8b7a76e4db6c0997d2f17ef72a091edbbd287e949ec16a18500"} Nov 24 01:44:49 crc kubenswrapper[4755]: I1124 01:44:49.491461 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn" Nov 24 01:44:49 crc kubenswrapper[4755]: I1124 01:44:49.620274 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/0c0128e5-6f6e-4d49-813c-36d2959a8e3e-neutron-ovn-metadata-agent-neutron-config-0\") pod \"0c0128e5-6f6e-4d49-813c-36d2959a8e3e\" (UID: \"0c0128e5-6f6e-4d49-813c-36d2959a8e3e\") " Nov 24 01:44:49 crc kubenswrapper[4755]: I1124 01:44:49.620532 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/0c0128e5-6f6e-4d49-813c-36d2959a8e3e-nova-metadata-neutron-config-0\") pod \"0c0128e5-6f6e-4d49-813c-36d2959a8e3e\" (UID: \"0c0128e5-6f6e-4d49-813c-36d2959a8e3e\") " Nov 24 01:44:49 crc kubenswrapper[4755]: I1124 01:44:49.620645 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0c0128e5-6f6e-4d49-813c-36d2959a8e3e-ssh-key\") pod \"0c0128e5-6f6e-4d49-813c-36d2959a8e3e\" (UID: \"0c0128e5-6f6e-4d49-813c-36d2959a8e3e\") " Nov 24 01:44:49 crc kubenswrapper[4755]: I1124 01:44:49.620680 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0c0128e5-6f6e-4d49-813c-36d2959a8e3e-inventory\") pod \"0c0128e5-6f6e-4d49-813c-36d2959a8e3e\" (UID: \"0c0128e5-6f6e-4d49-813c-36d2959a8e3e\") " Nov 24 01:44:49 crc kubenswrapper[4755]: I1124 01:44:49.620842 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c0128e5-6f6e-4d49-813c-36d2959a8e3e-neutron-metadata-combined-ca-bundle\") pod \"0c0128e5-6f6e-4d49-813c-36d2959a8e3e\" (UID: \"0c0128e5-6f6e-4d49-813c-36d2959a8e3e\") " Nov 24 01:44:49 crc kubenswrapper[4755]: I1124 01:44:49.620906 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fp24p\" (UniqueName: \"kubernetes.io/projected/0c0128e5-6f6e-4d49-813c-36d2959a8e3e-kube-api-access-fp24p\") pod \"0c0128e5-6f6e-4d49-813c-36d2959a8e3e\" (UID: \"0c0128e5-6f6e-4d49-813c-36d2959a8e3e\") " Nov 24 01:44:49 crc kubenswrapper[4755]: I1124 01:44:49.626388 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0c0128e5-6f6e-4d49-813c-36d2959a8e3e-kube-api-access-fp24p" (OuterVolumeSpecName: "kube-api-access-fp24p") pod "0c0128e5-6f6e-4d49-813c-36d2959a8e3e" (UID: "0c0128e5-6f6e-4d49-813c-36d2959a8e3e"). InnerVolumeSpecName "kube-api-access-fp24p". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:44:49 crc kubenswrapper[4755]: I1124 01:44:49.627141 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c0128e5-6f6e-4d49-813c-36d2959a8e3e-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "0c0128e5-6f6e-4d49-813c-36d2959a8e3e" (UID: "0c0128e5-6f6e-4d49-813c-36d2959a8e3e"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:44:49 crc kubenswrapper[4755]: I1124 01:44:49.648191 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c0128e5-6f6e-4d49-813c-36d2959a8e3e-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "0c0128e5-6f6e-4d49-813c-36d2959a8e3e" (UID: "0c0128e5-6f6e-4d49-813c-36d2959a8e3e"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:44:49 crc kubenswrapper[4755]: I1124 01:44:49.648873 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c0128e5-6f6e-4d49-813c-36d2959a8e3e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "0c0128e5-6f6e-4d49-813c-36d2959a8e3e" (UID: "0c0128e5-6f6e-4d49-813c-36d2959a8e3e"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:44:49 crc kubenswrapper[4755]: I1124 01:44:49.662045 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c0128e5-6f6e-4d49-813c-36d2959a8e3e-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "0c0128e5-6f6e-4d49-813c-36d2959a8e3e" (UID: "0c0128e5-6f6e-4d49-813c-36d2959a8e3e"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:44:49 crc kubenswrapper[4755]: I1124 01:44:49.671367 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c0128e5-6f6e-4d49-813c-36d2959a8e3e-inventory" (OuterVolumeSpecName: "inventory") pod "0c0128e5-6f6e-4d49-813c-36d2959a8e3e" (UID: "0c0128e5-6f6e-4d49-813c-36d2959a8e3e"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:44:49 crc kubenswrapper[4755]: I1124 01:44:49.723452 4755 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/0c0128e5-6f6e-4d49-813c-36d2959a8e3e-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Nov 24 01:44:49 crc kubenswrapper[4755]: I1124 01:44:49.723483 4755 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/0c0128e5-6f6e-4d49-813c-36d2959a8e3e-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Nov 24 01:44:49 crc kubenswrapper[4755]: I1124 01:44:49.723492 4755 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0c0128e5-6f6e-4d49-813c-36d2959a8e3e-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 24 01:44:49 crc kubenswrapper[4755]: I1124 01:44:49.723501 4755 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0c0128e5-6f6e-4d49-813c-36d2959a8e3e-inventory\") on node \"crc\" DevicePath \"\"" Nov 24 01:44:49 crc kubenswrapper[4755]: I1124 01:44:49.723509 4755 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c0128e5-6f6e-4d49-813c-36d2959a8e3e-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:44:49 crc kubenswrapper[4755]: I1124 01:44:49.723519 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fp24p\" (UniqueName: \"kubernetes.io/projected/0c0128e5-6f6e-4d49-813c-36d2959a8e3e-kube-api-access-fp24p\") on node \"crc\" DevicePath \"\"" Nov 24 01:44:50 crc kubenswrapper[4755]: I1124 01:44:50.106309 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn" event={"ID":"0c0128e5-6f6e-4d49-813c-36d2959a8e3e","Type":"ContainerDied","Data":"1551368e6c47ae06263fe53cba9f1bcfb056e70ab2856d88b8df1f079b14fe11"} Nov 24 01:44:50 crc kubenswrapper[4755]: I1124 01:44:50.106361 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1551368e6c47ae06263fe53cba9f1bcfb056e70ab2856d88b8df1f079b14fe11" Nov 24 01:44:50 crc kubenswrapper[4755]: I1124 01:44:50.106416 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn" Nov 24 01:44:50 crc kubenswrapper[4755]: I1124 01:44:50.240486 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dwg9g"] Nov 24 01:44:50 crc kubenswrapper[4755]: E1124 01:44:50.241039 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c0128e5-6f6e-4d49-813c-36d2959a8e3e" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Nov 24 01:44:50 crc kubenswrapper[4755]: I1124 01:44:50.241070 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c0128e5-6f6e-4d49-813c-36d2959a8e3e" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Nov 24 01:44:50 crc kubenswrapper[4755]: I1124 01:44:50.241430 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c0128e5-6f6e-4d49-813c-36d2959a8e3e" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Nov 24 01:44:50 crc kubenswrapper[4755]: I1124 01:44:50.242379 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dwg9g" Nov 24 01:44:50 crc kubenswrapper[4755]: I1124 01:44:50.244380 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Nov 24 01:44:50 crc kubenswrapper[4755]: I1124 01:44:50.247659 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 24 01:44:50 crc kubenswrapper[4755]: I1124 01:44:50.248095 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Nov 24 01:44:50 crc kubenswrapper[4755]: I1124 01:44:50.248502 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Nov 24 01:44:50 crc kubenswrapper[4755]: I1124 01:44:50.248857 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-4dz5v" Nov 24 01:44:50 crc kubenswrapper[4755]: I1124 01:44:50.256016 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dwg9g"] Nov 24 01:44:50 crc kubenswrapper[4755]: I1124 01:44:50.334863 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cff1906b-beb7-4b0f-b20b-c0d155437b90-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-dwg9g\" (UID: \"cff1906b-beb7-4b0f-b20b-c0d155437b90\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dwg9g" Nov 24 01:44:50 crc kubenswrapper[4755]: I1124 01:44:50.335170 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cff1906b-beb7-4b0f-b20b-c0d155437b90-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-dwg9g\" (UID: \"cff1906b-beb7-4b0f-b20b-c0d155437b90\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dwg9g" Nov 24 01:44:50 crc kubenswrapper[4755]: I1124 01:44:50.335216 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/cff1906b-beb7-4b0f-b20b-c0d155437b90-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-dwg9g\" (UID: \"cff1906b-beb7-4b0f-b20b-c0d155437b90\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dwg9g" Nov 24 01:44:50 crc kubenswrapper[4755]: I1124 01:44:50.335373 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mjz6x\" (UniqueName: \"kubernetes.io/projected/cff1906b-beb7-4b0f-b20b-c0d155437b90-kube-api-access-mjz6x\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-dwg9g\" (UID: \"cff1906b-beb7-4b0f-b20b-c0d155437b90\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dwg9g" Nov 24 01:44:50 crc kubenswrapper[4755]: I1124 01:44:50.335554 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cff1906b-beb7-4b0f-b20b-c0d155437b90-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-dwg9g\" (UID: \"cff1906b-beb7-4b0f-b20b-c0d155437b90\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dwg9g" Nov 24 01:44:50 crc kubenswrapper[4755]: I1124 01:44:50.436966 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cff1906b-beb7-4b0f-b20b-c0d155437b90-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-dwg9g\" (UID: \"cff1906b-beb7-4b0f-b20b-c0d155437b90\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dwg9g" Nov 24 01:44:50 crc kubenswrapper[4755]: I1124 01:44:50.437267 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cff1906b-beb7-4b0f-b20b-c0d155437b90-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-dwg9g\" (UID: \"cff1906b-beb7-4b0f-b20b-c0d155437b90\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dwg9g" Nov 24 01:44:50 crc kubenswrapper[4755]: I1124 01:44:50.437410 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/cff1906b-beb7-4b0f-b20b-c0d155437b90-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-dwg9g\" (UID: \"cff1906b-beb7-4b0f-b20b-c0d155437b90\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dwg9g" Nov 24 01:44:50 crc kubenswrapper[4755]: I1124 01:44:50.437594 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mjz6x\" (UniqueName: \"kubernetes.io/projected/cff1906b-beb7-4b0f-b20b-c0d155437b90-kube-api-access-mjz6x\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-dwg9g\" (UID: \"cff1906b-beb7-4b0f-b20b-c0d155437b90\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dwg9g" Nov 24 01:44:50 crc kubenswrapper[4755]: I1124 01:44:50.437761 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cff1906b-beb7-4b0f-b20b-c0d155437b90-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-dwg9g\" (UID: \"cff1906b-beb7-4b0f-b20b-c0d155437b90\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dwg9g" Nov 24 01:44:50 crc kubenswrapper[4755]: I1124 01:44:50.443894 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cff1906b-beb7-4b0f-b20b-c0d155437b90-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-dwg9g\" (UID: \"cff1906b-beb7-4b0f-b20b-c0d155437b90\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dwg9g" Nov 24 01:44:50 crc kubenswrapper[4755]: I1124 01:44:50.443922 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/cff1906b-beb7-4b0f-b20b-c0d155437b90-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-dwg9g\" (UID: \"cff1906b-beb7-4b0f-b20b-c0d155437b90\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dwg9g" Nov 24 01:44:50 crc kubenswrapper[4755]: I1124 01:44:50.443952 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cff1906b-beb7-4b0f-b20b-c0d155437b90-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-dwg9g\" (UID: \"cff1906b-beb7-4b0f-b20b-c0d155437b90\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dwg9g" Nov 24 01:44:50 crc kubenswrapper[4755]: I1124 01:44:50.444169 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cff1906b-beb7-4b0f-b20b-c0d155437b90-ssh-key\") pod 
\"libvirt-edpm-deployment-openstack-edpm-ipam-dwg9g\" (UID: \"cff1906b-beb7-4b0f-b20b-c0d155437b90\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dwg9g" Nov 24 01:44:50 crc kubenswrapper[4755]: I1124 01:44:50.454063 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mjz6x\" (UniqueName: \"kubernetes.io/projected/cff1906b-beb7-4b0f-b20b-c0d155437b90-kube-api-access-mjz6x\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-dwg9g\" (UID: \"cff1906b-beb7-4b0f-b20b-c0d155437b90\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dwg9g" Nov 24 01:44:50 crc kubenswrapper[4755]: I1124 01:44:50.567673 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dwg9g" Nov 24 01:44:51 crc kubenswrapper[4755]: I1124 01:44:51.133341 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dwg9g"] Nov 24 01:44:52 crc kubenswrapper[4755]: I1124 01:44:52.131884 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dwg9g" event={"ID":"cff1906b-beb7-4b0f-b20b-c0d155437b90","Type":"ContainerStarted","Data":"fcf070ded24e3200ce2001d089fef5903660d31509059e581d1d876bf6c07709"} Nov 24 01:44:52 crc kubenswrapper[4755]: I1124 01:44:52.132273 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dwg9g" event={"ID":"cff1906b-beb7-4b0f-b20b-c0d155437b90","Type":"ContainerStarted","Data":"821ea457a9859282e13557f589325ddbfc5c12f5038f8f0023ecd52b81f31a24"} Nov 24 01:44:52 crc kubenswrapper[4755]: I1124 01:44:52.160033 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dwg9g" podStartSLOduration=1.668445317 podStartE2EDuration="2.160005345s" podCreationTimestamp="2025-11-24 01:44:50 +0000 UTC" firstStartedPulling="2025-11-24 01:44:51.144387048 +0000 UTC m=+1915.830452559" lastFinishedPulling="2025-11-24 01:44:51.635947086 +0000 UTC m=+1916.322012587" observedRunningTime="2025-11-24 01:44:52.149458219 +0000 UTC m=+1916.835523750" watchObservedRunningTime="2025-11-24 01:44:52.160005345 +0000 UTC m=+1916.846070876" Nov 24 01:45:00 crc kubenswrapper[4755]: I1124 01:45:00.151332 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29399145-9789x"] Nov 24 01:45:00 crc kubenswrapper[4755]: I1124 01:45:00.154364 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29399145-9789x" Nov 24 01:45:00 crc kubenswrapper[4755]: I1124 01:45:00.158184 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 24 01:45:00 crc kubenswrapper[4755]: I1124 01:45:00.161834 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 24 01:45:00 crc kubenswrapper[4755]: I1124 01:45:00.167703 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29399145-9789x"] Nov 24 01:45:00 crc kubenswrapper[4755]: I1124 01:45:00.220844 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gnb66\" (UniqueName: \"kubernetes.io/projected/7f7174c0-83bd-4f05-be4c-36c4e92d9bdd-kube-api-access-gnb66\") pod \"collect-profiles-29399145-9789x\" (UID: \"7f7174c0-83bd-4f05-be4c-36c4e92d9bdd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399145-9789x" Nov 24 01:45:00 crc kubenswrapper[4755]: I1124 01:45:00.220956 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7f7174c0-83bd-4f05-be4c-36c4e92d9bdd-secret-volume\") pod \"collect-profiles-29399145-9789x\" (UID: \"7f7174c0-83bd-4f05-be4c-36c4e92d9bdd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399145-9789x" Nov 24 01:45:00 crc kubenswrapper[4755]: I1124 01:45:00.221012 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7f7174c0-83bd-4f05-be4c-36c4e92d9bdd-config-volume\") pod \"collect-profiles-29399145-9789x\" (UID: \"7f7174c0-83bd-4f05-be4c-36c4e92d9bdd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399145-9789x" Nov 24 01:45:00 crc kubenswrapper[4755]: I1124 01:45:00.322793 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gnb66\" (UniqueName: \"kubernetes.io/projected/7f7174c0-83bd-4f05-be4c-36c4e92d9bdd-kube-api-access-gnb66\") pod \"collect-profiles-29399145-9789x\" (UID: \"7f7174c0-83bd-4f05-be4c-36c4e92d9bdd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399145-9789x" Nov 24 01:45:00 crc kubenswrapper[4755]: I1124 01:45:00.322876 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7f7174c0-83bd-4f05-be4c-36c4e92d9bdd-secret-volume\") pod \"collect-profiles-29399145-9789x\" (UID: \"7f7174c0-83bd-4f05-be4c-36c4e92d9bdd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399145-9789x" Nov 24 01:45:00 crc kubenswrapper[4755]: I1124 01:45:00.322908 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7f7174c0-83bd-4f05-be4c-36c4e92d9bdd-config-volume\") pod \"collect-profiles-29399145-9789x\" (UID: \"7f7174c0-83bd-4f05-be4c-36c4e92d9bdd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399145-9789x" Nov 24 01:45:00 crc kubenswrapper[4755]: I1124 01:45:00.323834 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7f7174c0-83bd-4f05-be4c-36c4e92d9bdd-config-volume\") pod 
\"collect-profiles-29399145-9789x\" (UID: \"7f7174c0-83bd-4f05-be4c-36c4e92d9bdd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399145-9789x" Nov 24 01:45:00 crc kubenswrapper[4755]: I1124 01:45:00.328694 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7f7174c0-83bd-4f05-be4c-36c4e92d9bdd-secret-volume\") pod \"collect-profiles-29399145-9789x\" (UID: \"7f7174c0-83bd-4f05-be4c-36c4e92d9bdd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399145-9789x" Nov 24 01:45:00 crc kubenswrapper[4755]: I1124 01:45:00.342375 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gnb66\" (UniqueName: \"kubernetes.io/projected/7f7174c0-83bd-4f05-be4c-36c4e92d9bdd-kube-api-access-gnb66\") pod \"collect-profiles-29399145-9789x\" (UID: \"7f7174c0-83bd-4f05-be4c-36c4e92d9bdd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399145-9789x" Nov 24 01:45:00 crc kubenswrapper[4755]: I1124 01:45:00.477395 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29399145-9789x" Nov 24 01:45:00 crc kubenswrapper[4755]: I1124 01:45:00.900736 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29399145-9789x"] Nov 24 01:45:01 crc kubenswrapper[4755]: I1124 01:45:01.214439 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29399145-9789x" event={"ID":"7f7174c0-83bd-4f05-be4c-36c4e92d9bdd","Type":"ContainerStarted","Data":"664140f4608c7243c8a961709f36219ece9c18a38232017e6056d7eb5dda2f02"} Nov 24 01:45:01 crc kubenswrapper[4755]: I1124 01:45:01.215529 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29399145-9789x" event={"ID":"7f7174c0-83bd-4f05-be4c-36c4e92d9bdd","Type":"ContainerStarted","Data":"6dfab14e7cf8164dd37242204c443f4f214d757bc4430e6edb79033559349ca9"} Nov 24 01:45:01 crc kubenswrapper[4755]: I1124 01:45:01.240504 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29399145-9789x" podStartSLOduration=1.240484299 podStartE2EDuration="1.240484299s" podCreationTimestamp="2025-11-24 01:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 01:45:01.231831966 +0000 UTC m=+1925.917897467" watchObservedRunningTime="2025-11-24 01:45:01.240484299 +0000 UTC m=+1925.926549800" Nov 24 01:45:02 crc kubenswrapper[4755]: I1124 01:45:02.226861 4755 generic.go:334] "Generic (PLEG): container finished" podID="7f7174c0-83bd-4f05-be4c-36c4e92d9bdd" containerID="664140f4608c7243c8a961709f36219ece9c18a38232017e6056d7eb5dda2f02" exitCode=0 Nov 24 01:45:02 crc kubenswrapper[4755]: I1124 01:45:02.226961 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29399145-9789x" event={"ID":"7f7174c0-83bd-4f05-be4c-36c4e92d9bdd","Type":"ContainerDied","Data":"664140f4608c7243c8a961709f36219ece9c18a38232017e6056d7eb5dda2f02"} Nov 24 01:45:03 crc kubenswrapper[4755]: I1124 01:45:03.573709 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29399145-9789x" Nov 24 01:45:03 crc kubenswrapper[4755]: I1124 01:45:03.686818 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7f7174c0-83bd-4f05-be4c-36c4e92d9bdd-config-volume\") pod \"7f7174c0-83bd-4f05-be4c-36c4e92d9bdd\" (UID: \"7f7174c0-83bd-4f05-be4c-36c4e92d9bdd\") " Nov 24 01:45:03 crc kubenswrapper[4755]: I1124 01:45:03.687327 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7f7174c0-83bd-4f05-be4c-36c4e92d9bdd-secret-volume\") pod \"7f7174c0-83bd-4f05-be4c-36c4e92d9bdd\" (UID: \"7f7174c0-83bd-4f05-be4c-36c4e92d9bdd\") " Nov 24 01:45:03 crc kubenswrapper[4755]: I1124 01:45:03.687401 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gnb66\" (UniqueName: \"kubernetes.io/projected/7f7174c0-83bd-4f05-be4c-36c4e92d9bdd-kube-api-access-gnb66\") pod \"7f7174c0-83bd-4f05-be4c-36c4e92d9bdd\" (UID: \"7f7174c0-83bd-4f05-be4c-36c4e92d9bdd\") " Nov 24 01:45:03 crc kubenswrapper[4755]: I1124 01:45:03.687484 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7f7174c0-83bd-4f05-be4c-36c4e92d9bdd-config-volume" (OuterVolumeSpecName: "config-volume") pod "7f7174c0-83bd-4f05-be4c-36c4e92d9bdd" (UID: "7f7174c0-83bd-4f05-be4c-36c4e92d9bdd"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:45:03 crc kubenswrapper[4755]: I1124 01:45:03.688075 4755 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7f7174c0-83bd-4f05-be4c-36c4e92d9bdd-config-volume\") on node \"crc\" DevicePath \"\"" Nov 24 01:45:03 crc kubenswrapper[4755]: I1124 01:45:03.692926 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f7174c0-83bd-4f05-be4c-36c4e92d9bdd-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "7f7174c0-83bd-4f05-be4c-36c4e92d9bdd" (UID: "7f7174c0-83bd-4f05-be4c-36c4e92d9bdd"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:45:03 crc kubenswrapper[4755]: I1124 01:45:03.692943 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f7174c0-83bd-4f05-be4c-36c4e92d9bdd-kube-api-access-gnb66" (OuterVolumeSpecName: "kube-api-access-gnb66") pod "7f7174c0-83bd-4f05-be4c-36c4e92d9bdd" (UID: "7f7174c0-83bd-4f05-be4c-36c4e92d9bdd"). InnerVolumeSpecName "kube-api-access-gnb66". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:45:03 crc kubenswrapper[4755]: I1124 01:45:03.790077 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gnb66\" (UniqueName: \"kubernetes.io/projected/7f7174c0-83bd-4f05-be4c-36c4e92d9bdd-kube-api-access-gnb66\") on node \"crc\" DevicePath \"\"" Nov 24 01:45:03 crc kubenswrapper[4755]: I1124 01:45:03.790136 4755 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7f7174c0-83bd-4f05-be4c-36c4e92d9bdd-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 24 01:45:04 crc kubenswrapper[4755]: I1124 01:45:04.248771 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29399145-9789x" event={"ID":"7f7174c0-83bd-4f05-be4c-36c4e92d9bdd","Type":"ContainerDied","Data":"6dfab14e7cf8164dd37242204c443f4f214d757bc4430e6edb79033559349ca9"} Nov 24 01:45:04 crc kubenswrapper[4755]: I1124 01:45:04.248805 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6dfab14e7cf8164dd37242204c443f4f214d757bc4430e6edb79033559349ca9" Nov 24 01:45:04 crc kubenswrapper[4755]: I1124 01:45:04.248876 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29399145-9789x" Nov 24 01:46:03 crc kubenswrapper[4755]: I1124 01:46:03.296206 4755 patch_prober.go:28] interesting pod/machine-config-daemon-h8xzm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 01:46:03 crc kubenswrapper[4755]: I1124 01:46:03.296858 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 01:46:33 crc kubenswrapper[4755]: I1124 01:46:33.294998 4755 patch_prober.go:28] interesting pod/machine-config-daemon-h8xzm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 01:46:33 crc kubenswrapper[4755]: I1124 01:46:33.295555 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 01:47:03 crc kubenswrapper[4755]: I1124 01:47:03.295004 4755 patch_prober.go:28] interesting pod/machine-config-daemon-h8xzm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 01:47:03 crc kubenswrapper[4755]: I1124 01:47:03.295651 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 01:47:03 crc kubenswrapper[4755]: I1124 01:47:03.295702 4755 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" Nov 24 01:47:03 crc kubenswrapper[4755]: I1124 01:47:03.296467 4755 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8fe5f5cacaadeb1d6112d56d1f9d970720cdea615fa510a42f2d8a67230c0f60"} pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 24 01:47:03 crc kubenswrapper[4755]: I1124 01:47:03.296529 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" containerID="cri-o://8fe5f5cacaadeb1d6112d56d1f9d970720cdea615fa510a42f2d8a67230c0f60" gracePeriod=600 Nov 24 01:47:03 crc kubenswrapper[4755]: I1124 01:47:03.469161 4755 generic.go:334] "Generic (PLEG): container finished" podID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerID="8fe5f5cacaadeb1d6112d56d1f9d970720cdea615fa510a42f2d8a67230c0f60" exitCode=0 Nov 24 01:47:03 crc kubenswrapper[4755]: I1124 01:47:03.469237 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" event={"ID":"b1962128-02a0-46c3-82c2-5055c2aed0b9","Type":"ContainerDied","Data":"8fe5f5cacaadeb1d6112d56d1f9d970720cdea615fa510a42f2d8a67230c0f60"} Nov 24 01:47:03 crc kubenswrapper[4755]: I1124 01:47:03.469452 4755 scope.go:117] "RemoveContainer" containerID="8c75477967bfd17f465845e9ad9d8e30c5874427ba92c3a82e0868aae5f07ae3" Nov 24 01:47:04 crc kubenswrapper[4755]: I1124 01:47:04.482942 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" event={"ID":"b1962128-02a0-46c3-82c2-5055c2aed0b9","Type":"ContainerStarted","Data":"0aeb2d7c1c1f984667558e3150c104ac9b2367d080d7c585fb7a0be1668b341e"} Nov 24 01:47:17 crc kubenswrapper[4755]: I1124 01:47:17.933666 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-8p8cd"] Nov 24 01:47:17 crc kubenswrapper[4755]: E1124 01:47:17.934737 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f7174c0-83bd-4f05-be4c-36c4e92d9bdd" containerName="collect-profiles" Nov 24 01:47:17 crc kubenswrapper[4755]: I1124 01:47:17.934755 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f7174c0-83bd-4f05-be4c-36c4e92d9bdd" containerName="collect-profiles" Nov 24 01:47:17 crc kubenswrapper[4755]: I1124 01:47:17.935031 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f7174c0-83bd-4f05-be4c-36c4e92d9bdd" containerName="collect-profiles" Nov 24 01:47:17 crc kubenswrapper[4755]: I1124 01:47:17.936792 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-8p8cd" Nov 24 01:47:17 crc kubenswrapper[4755]: I1124 01:47:17.942793 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8p8cd"] Nov 24 01:47:18 crc kubenswrapper[4755]: I1124 01:47:18.029596 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/908477b3-f6f5-4b7e-a372-aee77bcc2ada-catalog-content\") pod \"redhat-operators-8p8cd\" (UID: \"908477b3-f6f5-4b7e-a372-aee77bcc2ada\") " pod="openshift-marketplace/redhat-operators-8p8cd" Nov 24 01:47:18 crc kubenswrapper[4755]: I1124 01:47:18.030031 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/908477b3-f6f5-4b7e-a372-aee77bcc2ada-utilities\") pod \"redhat-operators-8p8cd\" (UID: \"908477b3-f6f5-4b7e-a372-aee77bcc2ada\") " pod="openshift-marketplace/redhat-operators-8p8cd" Nov 24 01:47:18 crc kubenswrapper[4755]: I1124 01:47:18.030125 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m6sks\" (UniqueName: \"kubernetes.io/projected/908477b3-f6f5-4b7e-a372-aee77bcc2ada-kube-api-access-m6sks\") pod \"redhat-operators-8p8cd\" (UID: \"908477b3-f6f5-4b7e-a372-aee77bcc2ada\") " pod="openshift-marketplace/redhat-operators-8p8cd" Nov 24 01:47:18 crc kubenswrapper[4755]: I1124 01:47:18.132281 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/908477b3-f6f5-4b7e-a372-aee77bcc2ada-utilities\") pod \"redhat-operators-8p8cd\" (UID: \"908477b3-f6f5-4b7e-a372-aee77bcc2ada\") " pod="openshift-marketplace/redhat-operators-8p8cd" Nov 24 01:47:18 crc kubenswrapper[4755]: I1124 01:47:18.132362 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6sks\" (UniqueName: \"kubernetes.io/projected/908477b3-f6f5-4b7e-a372-aee77bcc2ada-kube-api-access-m6sks\") pod \"redhat-operators-8p8cd\" (UID: \"908477b3-f6f5-4b7e-a372-aee77bcc2ada\") " pod="openshift-marketplace/redhat-operators-8p8cd" Nov 24 01:47:18 crc kubenswrapper[4755]: I1124 01:47:18.132475 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/908477b3-f6f5-4b7e-a372-aee77bcc2ada-catalog-content\") pod \"redhat-operators-8p8cd\" (UID: \"908477b3-f6f5-4b7e-a372-aee77bcc2ada\") " pod="openshift-marketplace/redhat-operators-8p8cd" Nov 24 01:47:18 crc kubenswrapper[4755]: I1124 01:47:18.132734 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/908477b3-f6f5-4b7e-a372-aee77bcc2ada-utilities\") pod \"redhat-operators-8p8cd\" (UID: \"908477b3-f6f5-4b7e-a372-aee77bcc2ada\") " pod="openshift-marketplace/redhat-operators-8p8cd" Nov 24 01:47:18 crc kubenswrapper[4755]: I1124 01:47:18.133088 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/908477b3-f6f5-4b7e-a372-aee77bcc2ada-catalog-content\") pod \"redhat-operators-8p8cd\" (UID: \"908477b3-f6f5-4b7e-a372-aee77bcc2ada\") " pod="openshift-marketplace/redhat-operators-8p8cd" Nov 24 01:47:18 crc kubenswrapper[4755]: I1124 01:47:18.184865 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-m6sks\" (UniqueName: \"kubernetes.io/projected/908477b3-f6f5-4b7e-a372-aee77bcc2ada-kube-api-access-m6sks\") pod \"redhat-operators-8p8cd\" (UID: \"908477b3-f6f5-4b7e-a372-aee77bcc2ada\") " pod="openshift-marketplace/redhat-operators-8p8cd" Nov 24 01:47:18 crc kubenswrapper[4755]: I1124 01:47:18.263978 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8p8cd" Nov 24 01:47:18 crc kubenswrapper[4755]: I1124 01:47:18.726430 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8p8cd"] Nov 24 01:47:19 crc kubenswrapper[4755]: I1124 01:47:19.630009 4755 generic.go:334] "Generic (PLEG): container finished" podID="908477b3-f6f5-4b7e-a372-aee77bcc2ada" containerID="4ec93e3b566efbbe11df58b9bc368a93fd6ed7d2bbec2648d260d91a81123735" exitCode=0 Nov 24 01:47:19 crc kubenswrapper[4755]: I1124 01:47:19.630098 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8p8cd" event={"ID":"908477b3-f6f5-4b7e-a372-aee77bcc2ada","Type":"ContainerDied","Data":"4ec93e3b566efbbe11df58b9bc368a93fd6ed7d2bbec2648d260d91a81123735"} Nov 24 01:47:19 crc kubenswrapper[4755]: I1124 01:47:19.630365 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8p8cd" event={"ID":"908477b3-f6f5-4b7e-a372-aee77bcc2ada","Type":"ContainerStarted","Data":"febeb78b44e4d24c839c537c8ecd636b6d6d1566fe76743f1f9b3ec9b1590d75"} Nov 24 01:47:20 crc kubenswrapper[4755]: I1124 01:47:20.640418 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8p8cd" event={"ID":"908477b3-f6f5-4b7e-a372-aee77bcc2ada","Type":"ContainerStarted","Data":"1ce79bbc4fd016a157c039f9c1267b8ca3d8ab75fcc38c2e090f4bfac8db752a"} Nov 24 01:47:21 crc kubenswrapper[4755]: I1124 01:47:21.651593 4755 generic.go:334] "Generic (PLEG): container finished" podID="908477b3-f6f5-4b7e-a372-aee77bcc2ada" containerID="1ce79bbc4fd016a157c039f9c1267b8ca3d8ab75fcc38c2e090f4bfac8db752a" exitCode=0 Nov 24 01:47:21 crc kubenswrapper[4755]: I1124 01:47:21.651664 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8p8cd" event={"ID":"908477b3-f6f5-4b7e-a372-aee77bcc2ada","Type":"ContainerDied","Data":"1ce79bbc4fd016a157c039f9c1267b8ca3d8ab75fcc38c2e090f4bfac8db752a"} Nov 24 01:47:22 crc kubenswrapper[4755]: I1124 01:47:22.666355 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8p8cd" event={"ID":"908477b3-f6f5-4b7e-a372-aee77bcc2ada","Type":"ContainerStarted","Data":"cd349cfd1448381f3033ce37c08c6a8c631854e305cf4b6d8b700ecf9e38f387"} Nov 24 01:47:22 crc kubenswrapper[4755]: I1124 01:47:22.685837 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-8p8cd" podStartSLOduration=3.097546215 podStartE2EDuration="5.685814112s" podCreationTimestamp="2025-11-24 01:47:17 +0000 UTC" firstStartedPulling="2025-11-24 01:47:19.631688098 +0000 UTC m=+2064.317753599" lastFinishedPulling="2025-11-24 01:47:22.219955985 +0000 UTC m=+2066.906021496" observedRunningTime="2025-11-24 01:47:22.682085988 +0000 UTC m=+2067.368151499" watchObservedRunningTime="2025-11-24 01:47:22.685814112 +0000 UTC m=+2067.371879633" Nov 24 01:47:28 crc kubenswrapper[4755]: I1124 01:47:28.265639 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-8p8cd" 
Nov 24 01:47:28 crc kubenswrapper[4755]: I1124 01:47:28.266421 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-8p8cd" Nov 24 01:47:28 crc kubenswrapper[4755]: I1124 01:47:28.315139 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-8p8cd" Nov 24 01:47:28 crc kubenswrapper[4755]: I1124 01:47:28.767713 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-8p8cd" Nov 24 01:47:28 crc kubenswrapper[4755]: I1124 01:47:28.819232 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8p8cd"] Nov 24 01:47:30 crc kubenswrapper[4755]: I1124 01:47:30.735791 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-8p8cd" podUID="908477b3-f6f5-4b7e-a372-aee77bcc2ada" containerName="registry-server" containerID="cri-o://cd349cfd1448381f3033ce37c08c6a8c631854e305cf4b6d8b700ecf9e38f387" gracePeriod=2 Nov 24 01:47:31 crc kubenswrapper[4755]: I1124 01:47:31.232417 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8p8cd" Nov 24 01:47:31 crc kubenswrapper[4755]: I1124 01:47:31.384937 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m6sks\" (UniqueName: \"kubernetes.io/projected/908477b3-f6f5-4b7e-a372-aee77bcc2ada-kube-api-access-m6sks\") pod \"908477b3-f6f5-4b7e-a372-aee77bcc2ada\" (UID: \"908477b3-f6f5-4b7e-a372-aee77bcc2ada\") " Nov 24 01:47:31 crc kubenswrapper[4755]: I1124 01:47:31.385064 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/908477b3-f6f5-4b7e-a372-aee77bcc2ada-utilities\") pod \"908477b3-f6f5-4b7e-a372-aee77bcc2ada\" (UID: \"908477b3-f6f5-4b7e-a372-aee77bcc2ada\") " Nov 24 01:47:31 crc kubenswrapper[4755]: I1124 01:47:31.385139 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/908477b3-f6f5-4b7e-a372-aee77bcc2ada-catalog-content\") pod \"908477b3-f6f5-4b7e-a372-aee77bcc2ada\" (UID: \"908477b3-f6f5-4b7e-a372-aee77bcc2ada\") " Nov 24 01:47:31 crc kubenswrapper[4755]: I1124 01:47:31.385967 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/908477b3-f6f5-4b7e-a372-aee77bcc2ada-utilities" (OuterVolumeSpecName: "utilities") pod "908477b3-f6f5-4b7e-a372-aee77bcc2ada" (UID: "908477b3-f6f5-4b7e-a372-aee77bcc2ada"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:47:31 crc kubenswrapper[4755]: I1124 01:47:31.387235 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/908477b3-f6f5-4b7e-a372-aee77bcc2ada-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 01:47:31 crc kubenswrapper[4755]: I1124 01:47:31.391122 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/908477b3-f6f5-4b7e-a372-aee77bcc2ada-kube-api-access-m6sks" (OuterVolumeSpecName: "kube-api-access-m6sks") pod "908477b3-f6f5-4b7e-a372-aee77bcc2ada" (UID: "908477b3-f6f5-4b7e-a372-aee77bcc2ada"). InnerVolumeSpecName "kube-api-access-m6sks". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:47:31 crc kubenswrapper[4755]: I1124 01:47:31.485028 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/908477b3-f6f5-4b7e-a372-aee77bcc2ada-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "908477b3-f6f5-4b7e-a372-aee77bcc2ada" (UID: "908477b3-f6f5-4b7e-a372-aee77bcc2ada"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:47:31 crc kubenswrapper[4755]: I1124 01:47:31.488945 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/908477b3-f6f5-4b7e-a372-aee77bcc2ada-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 01:47:31 crc kubenswrapper[4755]: I1124 01:47:31.488981 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m6sks\" (UniqueName: \"kubernetes.io/projected/908477b3-f6f5-4b7e-a372-aee77bcc2ada-kube-api-access-m6sks\") on node \"crc\" DevicePath \"\"" Nov 24 01:47:31 crc kubenswrapper[4755]: I1124 01:47:31.748311 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8p8cd" Nov 24 01:47:31 crc kubenswrapper[4755]: I1124 01:47:31.748357 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8p8cd" event={"ID":"908477b3-f6f5-4b7e-a372-aee77bcc2ada","Type":"ContainerDied","Data":"cd349cfd1448381f3033ce37c08c6a8c631854e305cf4b6d8b700ecf9e38f387"} Nov 24 01:47:31 crc kubenswrapper[4755]: I1124 01:47:31.748504 4755 scope.go:117] "RemoveContainer" containerID="cd349cfd1448381f3033ce37c08c6a8c631854e305cf4b6d8b700ecf9e38f387" Nov 24 01:47:31 crc kubenswrapper[4755]: I1124 01:47:31.748181 4755 generic.go:334] "Generic (PLEG): container finished" podID="908477b3-f6f5-4b7e-a372-aee77bcc2ada" containerID="cd349cfd1448381f3033ce37c08c6a8c631854e305cf4b6d8b700ecf9e38f387" exitCode=0 Nov 24 01:47:31 crc kubenswrapper[4755]: I1124 01:47:31.749221 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8p8cd" event={"ID":"908477b3-f6f5-4b7e-a372-aee77bcc2ada","Type":"ContainerDied","Data":"febeb78b44e4d24c839c537c8ecd636b6d6d1566fe76743f1f9b3ec9b1590d75"} Nov 24 01:47:31 crc kubenswrapper[4755]: I1124 01:47:31.773724 4755 scope.go:117] "RemoveContainer" containerID="1ce79bbc4fd016a157c039f9c1267b8ca3d8ab75fcc38c2e090f4bfac8db752a" Nov 24 01:47:31 crc kubenswrapper[4755]: I1124 01:47:31.799103 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8p8cd"] Nov 24 01:47:31 crc kubenswrapper[4755]: I1124 01:47:31.812156 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-8p8cd"] Nov 24 01:47:31 crc kubenswrapper[4755]: I1124 01:47:31.821816 4755 scope.go:117] "RemoveContainer" containerID="4ec93e3b566efbbe11df58b9bc368a93fd6ed7d2bbec2648d260d91a81123735" Nov 24 01:47:31 crc kubenswrapper[4755]: I1124 01:47:31.853327 4755 scope.go:117] "RemoveContainer" containerID="cd349cfd1448381f3033ce37c08c6a8c631854e305cf4b6d8b700ecf9e38f387" Nov 24 01:47:31 crc kubenswrapper[4755]: E1124 01:47:31.853925 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cd349cfd1448381f3033ce37c08c6a8c631854e305cf4b6d8b700ecf9e38f387\": container with ID starting with cd349cfd1448381f3033ce37c08c6a8c631854e305cf4b6d8b700ecf9e38f387 
not found: ID does not exist" containerID="cd349cfd1448381f3033ce37c08c6a8c631854e305cf4b6d8b700ecf9e38f387" Nov 24 01:47:31 crc kubenswrapper[4755]: I1124 01:47:31.853958 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd349cfd1448381f3033ce37c08c6a8c631854e305cf4b6d8b700ecf9e38f387"} err="failed to get container status \"cd349cfd1448381f3033ce37c08c6a8c631854e305cf4b6d8b700ecf9e38f387\": rpc error: code = NotFound desc = could not find container \"cd349cfd1448381f3033ce37c08c6a8c631854e305cf4b6d8b700ecf9e38f387\": container with ID starting with cd349cfd1448381f3033ce37c08c6a8c631854e305cf4b6d8b700ecf9e38f387 not found: ID does not exist" Nov 24 01:47:31 crc kubenswrapper[4755]: I1124 01:47:31.853987 4755 scope.go:117] "RemoveContainer" containerID="1ce79bbc4fd016a157c039f9c1267b8ca3d8ab75fcc38c2e090f4bfac8db752a" Nov 24 01:47:31 crc kubenswrapper[4755]: E1124 01:47:31.854262 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1ce79bbc4fd016a157c039f9c1267b8ca3d8ab75fcc38c2e090f4bfac8db752a\": container with ID starting with 1ce79bbc4fd016a157c039f9c1267b8ca3d8ab75fcc38c2e090f4bfac8db752a not found: ID does not exist" containerID="1ce79bbc4fd016a157c039f9c1267b8ca3d8ab75fcc38c2e090f4bfac8db752a" Nov 24 01:47:31 crc kubenswrapper[4755]: I1124 01:47:31.854372 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ce79bbc4fd016a157c039f9c1267b8ca3d8ab75fcc38c2e090f4bfac8db752a"} err="failed to get container status \"1ce79bbc4fd016a157c039f9c1267b8ca3d8ab75fcc38c2e090f4bfac8db752a\": rpc error: code = NotFound desc = could not find container \"1ce79bbc4fd016a157c039f9c1267b8ca3d8ab75fcc38c2e090f4bfac8db752a\": container with ID starting with 1ce79bbc4fd016a157c039f9c1267b8ca3d8ab75fcc38c2e090f4bfac8db752a not found: ID does not exist" Nov 24 01:47:31 crc kubenswrapper[4755]: I1124 01:47:31.854550 4755 scope.go:117] "RemoveContainer" containerID="4ec93e3b566efbbe11df58b9bc368a93fd6ed7d2bbec2648d260d91a81123735" Nov 24 01:47:31 crc kubenswrapper[4755]: E1124 01:47:31.854935 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4ec93e3b566efbbe11df58b9bc368a93fd6ed7d2bbec2648d260d91a81123735\": container with ID starting with 4ec93e3b566efbbe11df58b9bc368a93fd6ed7d2bbec2648d260d91a81123735 not found: ID does not exist" containerID="4ec93e3b566efbbe11df58b9bc368a93fd6ed7d2bbec2648d260d91a81123735" Nov 24 01:47:31 crc kubenswrapper[4755]: I1124 01:47:31.855043 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ec93e3b566efbbe11df58b9bc368a93fd6ed7d2bbec2648d260d91a81123735"} err="failed to get container status \"4ec93e3b566efbbe11df58b9bc368a93fd6ed7d2bbec2648d260d91a81123735\": rpc error: code = NotFound desc = could not find container \"4ec93e3b566efbbe11df58b9bc368a93fd6ed7d2bbec2648d260d91a81123735\": container with ID starting with 4ec93e3b566efbbe11df58b9bc368a93fd6ed7d2bbec2648d260d91a81123735 not found: ID does not exist" Nov 24 01:47:32 crc kubenswrapper[4755]: I1124 01:47:32.008552 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="908477b3-f6f5-4b7e-a372-aee77bcc2ada" path="/var/lib/kubelet/pods/908477b3-f6f5-4b7e-a372-aee77bcc2ada/volumes" Nov 24 01:48:18 crc kubenswrapper[4755]: I1124 01:48:18.692863 4755 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/redhat-marketplace-kq8dw"] Nov 24 01:48:18 crc kubenswrapper[4755]: E1124 01:48:18.693819 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="908477b3-f6f5-4b7e-a372-aee77bcc2ada" containerName="extract-utilities" Nov 24 01:48:18 crc kubenswrapper[4755]: I1124 01:48:18.693833 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="908477b3-f6f5-4b7e-a372-aee77bcc2ada" containerName="extract-utilities" Nov 24 01:48:18 crc kubenswrapper[4755]: E1124 01:48:18.693860 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="908477b3-f6f5-4b7e-a372-aee77bcc2ada" containerName="extract-content" Nov 24 01:48:18 crc kubenswrapper[4755]: I1124 01:48:18.693867 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="908477b3-f6f5-4b7e-a372-aee77bcc2ada" containerName="extract-content" Nov 24 01:48:18 crc kubenswrapper[4755]: E1124 01:48:18.693880 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="908477b3-f6f5-4b7e-a372-aee77bcc2ada" containerName="registry-server" Nov 24 01:48:18 crc kubenswrapper[4755]: I1124 01:48:18.693886 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="908477b3-f6f5-4b7e-a372-aee77bcc2ada" containerName="registry-server" Nov 24 01:48:18 crc kubenswrapper[4755]: I1124 01:48:18.694074 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="908477b3-f6f5-4b7e-a372-aee77bcc2ada" containerName="registry-server" Nov 24 01:48:18 crc kubenswrapper[4755]: I1124 01:48:18.695377 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kq8dw" Nov 24 01:48:18 crc kubenswrapper[4755]: I1124 01:48:18.706424 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kq8dw"] Nov 24 01:48:18 crc kubenswrapper[4755]: I1124 01:48:18.746505 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a29390c0-2bbd-4960-a20d-c3b69f300dc3-utilities\") pod \"redhat-marketplace-kq8dw\" (UID: \"a29390c0-2bbd-4960-a20d-c3b69f300dc3\") " pod="openshift-marketplace/redhat-marketplace-kq8dw" Nov 24 01:48:18 crc kubenswrapper[4755]: I1124 01:48:18.746649 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vmzmd\" (UniqueName: \"kubernetes.io/projected/a29390c0-2bbd-4960-a20d-c3b69f300dc3-kube-api-access-vmzmd\") pod \"redhat-marketplace-kq8dw\" (UID: \"a29390c0-2bbd-4960-a20d-c3b69f300dc3\") " pod="openshift-marketplace/redhat-marketplace-kq8dw" Nov 24 01:48:18 crc kubenswrapper[4755]: I1124 01:48:18.746818 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a29390c0-2bbd-4960-a20d-c3b69f300dc3-catalog-content\") pod \"redhat-marketplace-kq8dw\" (UID: \"a29390c0-2bbd-4960-a20d-c3b69f300dc3\") " pod="openshift-marketplace/redhat-marketplace-kq8dw" Nov 24 01:48:18 crc kubenswrapper[4755]: I1124 01:48:18.848520 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a29390c0-2bbd-4960-a20d-c3b69f300dc3-utilities\") pod \"redhat-marketplace-kq8dw\" (UID: \"a29390c0-2bbd-4960-a20d-c3b69f300dc3\") " pod="openshift-marketplace/redhat-marketplace-kq8dw" Nov 24 01:48:18 crc kubenswrapper[4755]: I1124 01:48:18.848963 4755 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-vmzmd\" (UniqueName: \"kubernetes.io/projected/a29390c0-2bbd-4960-a20d-c3b69f300dc3-kube-api-access-vmzmd\") pod \"redhat-marketplace-kq8dw\" (UID: \"a29390c0-2bbd-4960-a20d-c3b69f300dc3\") " pod="openshift-marketplace/redhat-marketplace-kq8dw" Nov 24 01:48:18 crc kubenswrapper[4755]: I1124 01:48:18.849086 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a29390c0-2bbd-4960-a20d-c3b69f300dc3-utilities\") pod \"redhat-marketplace-kq8dw\" (UID: \"a29390c0-2bbd-4960-a20d-c3b69f300dc3\") " pod="openshift-marketplace/redhat-marketplace-kq8dw" Nov 24 01:48:18 crc kubenswrapper[4755]: I1124 01:48:18.849495 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a29390c0-2bbd-4960-a20d-c3b69f300dc3-catalog-content\") pod \"redhat-marketplace-kq8dw\" (UID: \"a29390c0-2bbd-4960-a20d-c3b69f300dc3\") " pod="openshift-marketplace/redhat-marketplace-kq8dw" Nov 24 01:48:18 crc kubenswrapper[4755]: I1124 01:48:18.849938 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a29390c0-2bbd-4960-a20d-c3b69f300dc3-catalog-content\") pod \"redhat-marketplace-kq8dw\" (UID: \"a29390c0-2bbd-4960-a20d-c3b69f300dc3\") " pod="openshift-marketplace/redhat-marketplace-kq8dw" Nov 24 01:48:18 crc kubenswrapper[4755]: I1124 01:48:18.870827 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vmzmd\" (UniqueName: \"kubernetes.io/projected/a29390c0-2bbd-4960-a20d-c3b69f300dc3-kube-api-access-vmzmd\") pod \"redhat-marketplace-kq8dw\" (UID: \"a29390c0-2bbd-4960-a20d-c3b69f300dc3\") " pod="openshift-marketplace/redhat-marketplace-kq8dw" Nov 24 01:48:19 crc kubenswrapper[4755]: I1124 01:48:19.021818 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kq8dw" Nov 24 01:48:19 crc kubenswrapper[4755]: I1124 01:48:19.308123 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kq8dw"] Nov 24 01:48:20 crc kubenswrapper[4755]: I1124 01:48:20.202428 4755 generic.go:334] "Generic (PLEG): container finished" podID="a29390c0-2bbd-4960-a20d-c3b69f300dc3" containerID="abe69e79af6884986d11d7ba76e8d5eca3173c6928cbc7147448caf130b7abaa" exitCode=0 Nov 24 01:48:20 crc kubenswrapper[4755]: I1124 01:48:20.202471 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kq8dw" event={"ID":"a29390c0-2bbd-4960-a20d-c3b69f300dc3","Type":"ContainerDied","Data":"abe69e79af6884986d11d7ba76e8d5eca3173c6928cbc7147448caf130b7abaa"} Nov 24 01:48:20 crc kubenswrapper[4755]: I1124 01:48:20.202514 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kq8dw" event={"ID":"a29390c0-2bbd-4960-a20d-c3b69f300dc3","Type":"ContainerStarted","Data":"253ff55fd803cb7537f69eb9ff18a98af23486c6f54ca820d8864f9990f7d86b"} Nov 24 01:48:22 crc kubenswrapper[4755]: I1124 01:48:22.766539 4755 generic.go:334] "Generic (PLEG): container finished" podID="a29390c0-2bbd-4960-a20d-c3b69f300dc3" containerID="a5379dea74a53ad439697bf1c2d4d4ae8cd9313f50909a90ce281c795e5357c2" exitCode=0 Nov 24 01:48:22 crc kubenswrapper[4755]: I1124 01:48:22.766650 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kq8dw" event={"ID":"a29390c0-2bbd-4960-a20d-c3b69f300dc3","Type":"ContainerDied","Data":"a5379dea74a53ad439697bf1c2d4d4ae8cd9313f50909a90ce281c795e5357c2"} Nov 24 01:48:23 crc kubenswrapper[4755]: I1124 01:48:23.778507 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kq8dw" event={"ID":"a29390c0-2bbd-4960-a20d-c3b69f300dc3","Type":"ContainerStarted","Data":"c208c88f77fda8fa299b4b9d94ed2a1989959ac9bcf46b1d77c48084c0783f16"} Nov 24 01:48:23 crc kubenswrapper[4755]: I1124 01:48:23.806837 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-kq8dw" podStartSLOduration=2.67332021 podStartE2EDuration="5.806816593s" podCreationTimestamp="2025-11-24 01:48:18 +0000 UTC" firstStartedPulling="2025-11-24 01:48:20.204969572 +0000 UTC m=+2124.891035083" lastFinishedPulling="2025-11-24 01:48:23.338465975 +0000 UTC m=+2128.024531466" observedRunningTime="2025-11-24 01:48:23.803526251 +0000 UTC m=+2128.489591752" watchObservedRunningTime="2025-11-24 01:48:23.806816593 +0000 UTC m=+2128.492882104" Nov 24 01:48:24 crc kubenswrapper[4755]: I1124 01:48:24.876664 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-wlb57"] Nov 24 01:48:24 crc kubenswrapper[4755]: I1124 01:48:24.879826 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-wlb57" Nov 24 01:48:24 crc kubenswrapper[4755]: I1124 01:48:24.887467 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wlb57"] Nov 24 01:48:24 crc kubenswrapper[4755]: I1124 01:48:24.966263 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/36dc1e89-2e6f-479e-806a-75ece4ff32ec-utilities\") pod \"certified-operators-wlb57\" (UID: \"36dc1e89-2e6f-479e-806a-75ece4ff32ec\") " pod="openshift-marketplace/certified-operators-wlb57" Nov 24 01:48:24 crc kubenswrapper[4755]: I1124 01:48:24.966642 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-84z7c\" (UniqueName: \"kubernetes.io/projected/36dc1e89-2e6f-479e-806a-75ece4ff32ec-kube-api-access-84z7c\") pod \"certified-operators-wlb57\" (UID: \"36dc1e89-2e6f-479e-806a-75ece4ff32ec\") " pod="openshift-marketplace/certified-operators-wlb57" Nov 24 01:48:24 crc kubenswrapper[4755]: I1124 01:48:24.966844 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/36dc1e89-2e6f-479e-806a-75ece4ff32ec-catalog-content\") pod \"certified-operators-wlb57\" (UID: \"36dc1e89-2e6f-479e-806a-75ece4ff32ec\") " pod="openshift-marketplace/certified-operators-wlb57" Nov 24 01:48:25 crc kubenswrapper[4755]: I1124 01:48:25.070169 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/36dc1e89-2e6f-479e-806a-75ece4ff32ec-catalog-content\") pod \"certified-operators-wlb57\" (UID: \"36dc1e89-2e6f-479e-806a-75ece4ff32ec\") " pod="openshift-marketplace/certified-operators-wlb57" Nov 24 01:48:25 crc kubenswrapper[4755]: I1124 01:48:25.070800 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/36dc1e89-2e6f-479e-806a-75ece4ff32ec-utilities\") pod \"certified-operators-wlb57\" (UID: \"36dc1e89-2e6f-479e-806a-75ece4ff32ec\") " pod="openshift-marketplace/certified-operators-wlb57" Nov 24 01:48:25 crc kubenswrapper[4755]: I1124 01:48:25.071054 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/36dc1e89-2e6f-479e-806a-75ece4ff32ec-catalog-content\") pod \"certified-operators-wlb57\" (UID: \"36dc1e89-2e6f-479e-806a-75ece4ff32ec\") " pod="openshift-marketplace/certified-operators-wlb57" Nov 24 01:48:25 crc kubenswrapper[4755]: I1124 01:48:25.071148 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/36dc1e89-2e6f-479e-806a-75ece4ff32ec-utilities\") pod \"certified-operators-wlb57\" (UID: \"36dc1e89-2e6f-479e-806a-75ece4ff32ec\") " pod="openshift-marketplace/certified-operators-wlb57" Nov 24 01:48:25 crc kubenswrapper[4755]: I1124 01:48:25.071519 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-84z7c\" (UniqueName: \"kubernetes.io/projected/36dc1e89-2e6f-479e-806a-75ece4ff32ec-kube-api-access-84z7c\") pod \"certified-operators-wlb57\" (UID: \"36dc1e89-2e6f-479e-806a-75ece4ff32ec\") " pod="openshift-marketplace/certified-operators-wlb57" Nov 24 01:48:25 crc kubenswrapper[4755]: I1124 01:48:25.094988 4755 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-84z7c\" (UniqueName: \"kubernetes.io/projected/36dc1e89-2e6f-479e-806a-75ece4ff32ec-kube-api-access-84z7c\") pod \"certified-operators-wlb57\" (UID: \"36dc1e89-2e6f-479e-806a-75ece4ff32ec\") " pod="openshift-marketplace/certified-operators-wlb57" Nov 24 01:48:25 crc kubenswrapper[4755]: I1124 01:48:25.207675 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wlb57" Nov 24 01:48:25 crc kubenswrapper[4755]: I1124 01:48:25.694116 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wlb57"] Nov 24 01:48:25 crc kubenswrapper[4755]: I1124 01:48:25.807064 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wlb57" event={"ID":"36dc1e89-2e6f-479e-806a-75ece4ff32ec","Type":"ContainerStarted","Data":"04ba1178a8124ceff0deb06c2945a48608b922fe670f0c6861c665dba25e3313"} Nov 24 01:48:26 crc kubenswrapper[4755]: I1124 01:48:26.820799 4755 generic.go:334] "Generic (PLEG): container finished" podID="36dc1e89-2e6f-479e-806a-75ece4ff32ec" containerID="0f0e7e15c03313ede0825e32003bba90442b0ac8b6f8f6a2893ef703ccb64822" exitCode=0 Nov 24 01:48:26 crc kubenswrapper[4755]: I1124 01:48:26.820878 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wlb57" event={"ID":"36dc1e89-2e6f-479e-806a-75ece4ff32ec","Type":"ContainerDied","Data":"0f0e7e15c03313ede0825e32003bba90442b0ac8b6f8f6a2893ef703ccb64822"} Nov 24 01:48:28 crc kubenswrapper[4755]: I1124 01:48:28.846859 4755 generic.go:334] "Generic (PLEG): container finished" podID="36dc1e89-2e6f-479e-806a-75ece4ff32ec" containerID="d316460ca0b3a525d830e95e9a5977446f6bf9fbd21930c115a7f7f8a1acd44c" exitCode=0 Nov 24 01:48:28 crc kubenswrapper[4755]: I1124 01:48:28.846990 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wlb57" event={"ID":"36dc1e89-2e6f-479e-806a-75ece4ff32ec","Type":"ContainerDied","Data":"d316460ca0b3a525d830e95e9a5977446f6bf9fbd21930c115a7f7f8a1acd44c"} Nov 24 01:48:29 crc kubenswrapper[4755]: I1124 01:48:29.022144 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-kq8dw" Nov 24 01:48:29 crc kubenswrapper[4755]: I1124 01:48:29.022210 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-kq8dw" Nov 24 01:48:29 crc kubenswrapper[4755]: I1124 01:48:29.098816 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-kq8dw" Nov 24 01:48:29 crc kubenswrapper[4755]: I1124 01:48:29.862722 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wlb57" event={"ID":"36dc1e89-2e6f-479e-806a-75ece4ff32ec","Type":"ContainerStarted","Data":"eefbf29750ea9ae6e546aab5258c38f74ee5d6585587176232ab9634b07fdbed"} Nov 24 01:48:29 crc kubenswrapper[4755]: I1124 01:48:29.886517 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-wlb57" podStartSLOduration=3.33550323 podStartE2EDuration="5.886498162s" podCreationTimestamp="2025-11-24 01:48:24 +0000 UTC" firstStartedPulling="2025-11-24 01:48:26.823520717 +0000 UTC m=+2131.509586218" lastFinishedPulling="2025-11-24 01:48:29.374515639 +0000 UTC m=+2134.060581150" observedRunningTime="2025-11-24 
01:48:29.885910485 +0000 UTC m=+2134.571975996" watchObservedRunningTime="2025-11-24 01:48:29.886498162 +0000 UTC m=+2134.572563663" Nov 24 01:48:29 crc kubenswrapper[4755]: I1124 01:48:29.925657 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-kq8dw" Nov 24 01:48:31 crc kubenswrapper[4755]: I1124 01:48:31.471625 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kq8dw"] Nov 24 01:48:31 crc kubenswrapper[4755]: I1124 01:48:31.885503 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-kq8dw" podUID="a29390c0-2bbd-4960-a20d-c3b69f300dc3" containerName="registry-server" containerID="cri-o://c208c88f77fda8fa299b4b9d94ed2a1989959ac9bcf46b1d77c48084c0783f16" gracePeriod=2 Nov 24 01:48:32 crc kubenswrapper[4755]: I1124 01:48:32.362361 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kq8dw" Nov 24 01:48:32 crc kubenswrapper[4755]: I1124 01:48:32.470734 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a29390c0-2bbd-4960-a20d-c3b69f300dc3-utilities\") pod \"a29390c0-2bbd-4960-a20d-c3b69f300dc3\" (UID: \"a29390c0-2bbd-4960-a20d-c3b69f300dc3\") " Nov 24 01:48:32 crc kubenswrapper[4755]: I1124 01:48:32.470821 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vmzmd\" (UniqueName: \"kubernetes.io/projected/a29390c0-2bbd-4960-a20d-c3b69f300dc3-kube-api-access-vmzmd\") pod \"a29390c0-2bbd-4960-a20d-c3b69f300dc3\" (UID: \"a29390c0-2bbd-4960-a20d-c3b69f300dc3\") " Nov 24 01:48:32 crc kubenswrapper[4755]: I1124 01:48:32.470887 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a29390c0-2bbd-4960-a20d-c3b69f300dc3-catalog-content\") pod \"a29390c0-2bbd-4960-a20d-c3b69f300dc3\" (UID: \"a29390c0-2bbd-4960-a20d-c3b69f300dc3\") " Nov 24 01:48:32 crc kubenswrapper[4755]: I1124 01:48:32.472115 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a29390c0-2bbd-4960-a20d-c3b69f300dc3-utilities" (OuterVolumeSpecName: "utilities") pod "a29390c0-2bbd-4960-a20d-c3b69f300dc3" (UID: "a29390c0-2bbd-4960-a20d-c3b69f300dc3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:48:32 crc kubenswrapper[4755]: I1124 01:48:32.478798 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a29390c0-2bbd-4960-a20d-c3b69f300dc3-kube-api-access-vmzmd" (OuterVolumeSpecName: "kube-api-access-vmzmd") pod "a29390c0-2bbd-4960-a20d-c3b69f300dc3" (UID: "a29390c0-2bbd-4960-a20d-c3b69f300dc3"). InnerVolumeSpecName "kube-api-access-vmzmd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:48:32 crc kubenswrapper[4755]: I1124 01:48:32.490080 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a29390c0-2bbd-4960-a20d-c3b69f300dc3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a29390c0-2bbd-4960-a20d-c3b69f300dc3" (UID: "a29390c0-2bbd-4960-a20d-c3b69f300dc3"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:48:32 crc kubenswrapper[4755]: I1124 01:48:32.574980 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a29390c0-2bbd-4960-a20d-c3b69f300dc3-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 01:48:32 crc kubenswrapper[4755]: I1124 01:48:32.575030 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vmzmd\" (UniqueName: \"kubernetes.io/projected/a29390c0-2bbd-4960-a20d-c3b69f300dc3-kube-api-access-vmzmd\") on node \"crc\" DevicePath \"\"" Nov 24 01:48:32 crc kubenswrapper[4755]: I1124 01:48:32.575052 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a29390c0-2bbd-4960-a20d-c3b69f300dc3-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 01:48:32 crc kubenswrapper[4755]: I1124 01:48:32.900180 4755 generic.go:334] "Generic (PLEG): container finished" podID="a29390c0-2bbd-4960-a20d-c3b69f300dc3" containerID="c208c88f77fda8fa299b4b9d94ed2a1989959ac9bcf46b1d77c48084c0783f16" exitCode=0 Nov 24 01:48:32 crc kubenswrapper[4755]: I1124 01:48:32.900257 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kq8dw" event={"ID":"a29390c0-2bbd-4960-a20d-c3b69f300dc3","Type":"ContainerDied","Data":"c208c88f77fda8fa299b4b9d94ed2a1989959ac9bcf46b1d77c48084c0783f16"} Nov 24 01:48:32 crc kubenswrapper[4755]: I1124 01:48:32.900668 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kq8dw" event={"ID":"a29390c0-2bbd-4960-a20d-c3b69f300dc3","Type":"ContainerDied","Data":"253ff55fd803cb7537f69eb9ff18a98af23486c6f54ca820d8864f9990f7d86b"} Nov 24 01:48:32 crc kubenswrapper[4755]: I1124 01:48:32.900702 4755 scope.go:117] "RemoveContainer" containerID="c208c88f77fda8fa299b4b9d94ed2a1989959ac9bcf46b1d77c48084c0783f16" Nov 24 01:48:32 crc kubenswrapper[4755]: I1124 01:48:32.900311 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kq8dw" Nov 24 01:48:32 crc kubenswrapper[4755]: I1124 01:48:32.931522 4755 scope.go:117] "RemoveContainer" containerID="a5379dea74a53ad439697bf1c2d4d4ae8cd9313f50909a90ce281c795e5357c2" Nov 24 01:48:32 crc kubenswrapper[4755]: I1124 01:48:32.940353 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kq8dw"] Nov 24 01:48:32 crc kubenswrapper[4755]: I1124 01:48:32.946924 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-kq8dw"] Nov 24 01:48:32 crc kubenswrapper[4755]: I1124 01:48:32.958116 4755 scope.go:117] "RemoveContainer" containerID="abe69e79af6884986d11d7ba76e8d5eca3173c6928cbc7147448caf130b7abaa" Nov 24 01:48:33 crc kubenswrapper[4755]: I1124 01:48:33.020351 4755 scope.go:117] "RemoveContainer" containerID="c208c88f77fda8fa299b4b9d94ed2a1989959ac9bcf46b1d77c48084c0783f16" Nov 24 01:48:33 crc kubenswrapper[4755]: E1124 01:48:33.020911 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c208c88f77fda8fa299b4b9d94ed2a1989959ac9bcf46b1d77c48084c0783f16\": container with ID starting with c208c88f77fda8fa299b4b9d94ed2a1989959ac9bcf46b1d77c48084c0783f16 not found: ID does not exist" containerID="c208c88f77fda8fa299b4b9d94ed2a1989959ac9bcf46b1d77c48084c0783f16" Nov 24 01:48:33 crc kubenswrapper[4755]: I1124 01:48:33.020978 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c208c88f77fda8fa299b4b9d94ed2a1989959ac9bcf46b1d77c48084c0783f16"} err="failed to get container status \"c208c88f77fda8fa299b4b9d94ed2a1989959ac9bcf46b1d77c48084c0783f16\": rpc error: code = NotFound desc = could not find container \"c208c88f77fda8fa299b4b9d94ed2a1989959ac9bcf46b1d77c48084c0783f16\": container with ID starting with c208c88f77fda8fa299b4b9d94ed2a1989959ac9bcf46b1d77c48084c0783f16 not found: ID does not exist" Nov 24 01:48:33 crc kubenswrapper[4755]: I1124 01:48:33.021022 4755 scope.go:117] "RemoveContainer" containerID="a5379dea74a53ad439697bf1c2d4d4ae8cd9313f50909a90ce281c795e5357c2" Nov 24 01:48:33 crc kubenswrapper[4755]: E1124 01:48:33.021454 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a5379dea74a53ad439697bf1c2d4d4ae8cd9313f50909a90ce281c795e5357c2\": container with ID starting with a5379dea74a53ad439697bf1c2d4d4ae8cd9313f50909a90ce281c795e5357c2 not found: ID does not exist" containerID="a5379dea74a53ad439697bf1c2d4d4ae8cd9313f50909a90ce281c795e5357c2" Nov 24 01:48:33 crc kubenswrapper[4755]: I1124 01:48:33.021500 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5379dea74a53ad439697bf1c2d4d4ae8cd9313f50909a90ce281c795e5357c2"} err="failed to get container status \"a5379dea74a53ad439697bf1c2d4d4ae8cd9313f50909a90ce281c795e5357c2\": rpc error: code = NotFound desc = could not find container \"a5379dea74a53ad439697bf1c2d4d4ae8cd9313f50909a90ce281c795e5357c2\": container with ID starting with a5379dea74a53ad439697bf1c2d4d4ae8cd9313f50909a90ce281c795e5357c2 not found: ID does not exist" Nov 24 01:48:33 crc kubenswrapper[4755]: I1124 01:48:33.021528 4755 scope.go:117] "RemoveContainer" containerID="abe69e79af6884986d11d7ba76e8d5eca3173c6928cbc7147448caf130b7abaa" Nov 24 01:48:33 crc kubenswrapper[4755]: E1124 01:48:33.022280 4755 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"abe69e79af6884986d11d7ba76e8d5eca3173c6928cbc7147448caf130b7abaa\": container with ID starting with abe69e79af6884986d11d7ba76e8d5eca3173c6928cbc7147448caf130b7abaa not found: ID does not exist" containerID="abe69e79af6884986d11d7ba76e8d5eca3173c6928cbc7147448caf130b7abaa" Nov 24 01:48:33 crc kubenswrapper[4755]: I1124 01:48:33.022309 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"abe69e79af6884986d11d7ba76e8d5eca3173c6928cbc7147448caf130b7abaa"} err="failed to get container status \"abe69e79af6884986d11d7ba76e8d5eca3173c6928cbc7147448caf130b7abaa\": rpc error: code = NotFound desc = could not find container \"abe69e79af6884986d11d7ba76e8d5eca3173c6928cbc7147448caf130b7abaa\": container with ID starting with abe69e79af6884986d11d7ba76e8d5eca3173c6928cbc7147448caf130b7abaa not found: ID does not exist" Nov 24 01:48:34 crc kubenswrapper[4755]: I1124 01:48:34.008995 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a29390c0-2bbd-4960-a20d-c3b69f300dc3" path="/var/lib/kubelet/pods/a29390c0-2bbd-4960-a20d-c3b69f300dc3/volumes" Nov 24 01:48:35 crc kubenswrapper[4755]: I1124 01:48:35.207823 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-wlb57" Nov 24 01:48:35 crc kubenswrapper[4755]: I1124 01:48:35.208142 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-wlb57" Nov 24 01:48:35 crc kubenswrapper[4755]: I1124 01:48:35.281642 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-wlb57" Nov 24 01:48:36 crc kubenswrapper[4755]: I1124 01:48:36.015872 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-wlb57" Nov 24 01:48:36 crc kubenswrapper[4755]: I1124 01:48:36.868580 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wlb57"] Nov 24 01:48:37 crc kubenswrapper[4755]: I1124 01:48:37.954543 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-wlb57" podUID="36dc1e89-2e6f-479e-806a-75ece4ff32ec" containerName="registry-server" containerID="cri-o://eefbf29750ea9ae6e546aab5258c38f74ee5d6585587176232ab9634b07fdbed" gracePeriod=2 Nov 24 01:48:38 crc kubenswrapper[4755]: I1124 01:48:38.511494 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-wlb57" Nov 24 01:48:38 crc kubenswrapper[4755]: I1124 01:48:38.612877 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/36dc1e89-2e6f-479e-806a-75ece4ff32ec-utilities\") pod \"36dc1e89-2e6f-479e-806a-75ece4ff32ec\" (UID: \"36dc1e89-2e6f-479e-806a-75ece4ff32ec\") " Nov 24 01:48:38 crc kubenswrapper[4755]: I1124 01:48:38.612967 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-84z7c\" (UniqueName: \"kubernetes.io/projected/36dc1e89-2e6f-479e-806a-75ece4ff32ec-kube-api-access-84z7c\") pod \"36dc1e89-2e6f-479e-806a-75ece4ff32ec\" (UID: \"36dc1e89-2e6f-479e-806a-75ece4ff32ec\") " Nov 24 01:48:38 crc kubenswrapper[4755]: I1124 01:48:38.613179 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/36dc1e89-2e6f-479e-806a-75ece4ff32ec-catalog-content\") pod \"36dc1e89-2e6f-479e-806a-75ece4ff32ec\" (UID: \"36dc1e89-2e6f-479e-806a-75ece4ff32ec\") " Nov 24 01:48:38 crc kubenswrapper[4755]: I1124 01:48:38.614570 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/36dc1e89-2e6f-479e-806a-75ece4ff32ec-utilities" (OuterVolumeSpecName: "utilities") pod "36dc1e89-2e6f-479e-806a-75ece4ff32ec" (UID: "36dc1e89-2e6f-479e-806a-75ece4ff32ec"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:48:38 crc kubenswrapper[4755]: I1124 01:48:38.622044 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36dc1e89-2e6f-479e-806a-75ece4ff32ec-kube-api-access-84z7c" (OuterVolumeSpecName: "kube-api-access-84z7c") pod "36dc1e89-2e6f-479e-806a-75ece4ff32ec" (UID: "36dc1e89-2e6f-479e-806a-75ece4ff32ec"). InnerVolumeSpecName "kube-api-access-84z7c". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:48:38 crc kubenswrapper[4755]: I1124 01:48:38.676390 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/36dc1e89-2e6f-479e-806a-75ece4ff32ec-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "36dc1e89-2e6f-479e-806a-75ece4ff32ec" (UID: "36dc1e89-2e6f-479e-806a-75ece4ff32ec"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:48:38 crc kubenswrapper[4755]: I1124 01:48:38.715076 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/36dc1e89-2e6f-479e-806a-75ece4ff32ec-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 01:48:38 crc kubenswrapper[4755]: I1124 01:48:38.715112 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/36dc1e89-2e6f-479e-806a-75ece4ff32ec-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 01:48:38 crc kubenswrapper[4755]: I1124 01:48:38.715122 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-84z7c\" (UniqueName: \"kubernetes.io/projected/36dc1e89-2e6f-479e-806a-75ece4ff32ec-kube-api-access-84z7c\") on node \"crc\" DevicePath \"\"" Nov 24 01:48:38 crc kubenswrapper[4755]: I1124 01:48:38.969661 4755 generic.go:334] "Generic (PLEG): container finished" podID="36dc1e89-2e6f-479e-806a-75ece4ff32ec" containerID="eefbf29750ea9ae6e546aab5258c38f74ee5d6585587176232ab9634b07fdbed" exitCode=0 Nov 24 01:48:38 crc kubenswrapper[4755]: I1124 01:48:38.969726 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wlb57" event={"ID":"36dc1e89-2e6f-479e-806a-75ece4ff32ec","Type":"ContainerDied","Data":"eefbf29750ea9ae6e546aab5258c38f74ee5d6585587176232ab9634b07fdbed"} Nov 24 01:48:38 crc kubenswrapper[4755]: I1124 01:48:38.969772 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wlb57" Nov 24 01:48:38 crc kubenswrapper[4755]: I1124 01:48:38.969797 4755 scope.go:117] "RemoveContainer" containerID="eefbf29750ea9ae6e546aab5258c38f74ee5d6585587176232ab9634b07fdbed" Nov 24 01:48:38 crc kubenswrapper[4755]: I1124 01:48:38.969779 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wlb57" event={"ID":"36dc1e89-2e6f-479e-806a-75ece4ff32ec","Type":"ContainerDied","Data":"04ba1178a8124ceff0deb06c2945a48608b922fe670f0c6861c665dba25e3313"} Nov 24 01:48:39 crc kubenswrapper[4755]: I1124 01:48:39.012236 4755 scope.go:117] "RemoveContainer" containerID="d316460ca0b3a525d830e95e9a5977446f6bf9fbd21930c115a7f7f8a1acd44c" Nov 24 01:48:39 crc kubenswrapper[4755]: I1124 01:48:39.015224 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wlb57"] Nov 24 01:48:39 crc kubenswrapper[4755]: I1124 01:48:39.023500 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-wlb57"] Nov 24 01:48:39 crc kubenswrapper[4755]: I1124 01:48:39.039125 4755 scope.go:117] "RemoveContainer" containerID="0f0e7e15c03313ede0825e32003bba90442b0ac8b6f8f6a2893ef703ccb64822" Nov 24 01:48:39 crc kubenswrapper[4755]: I1124 01:48:39.094769 4755 scope.go:117] "RemoveContainer" containerID="eefbf29750ea9ae6e546aab5258c38f74ee5d6585587176232ab9634b07fdbed" Nov 24 01:48:39 crc kubenswrapper[4755]: E1124 01:48:39.095653 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eefbf29750ea9ae6e546aab5258c38f74ee5d6585587176232ab9634b07fdbed\": container with ID starting with eefbf29750ea9ae6e546aab5258c38f74ee5d6585587176232ab9634b07fdbed not found: ID does not exist" containerID="eefbf29750ea9ae6e546aab5258c38f74ee5d6585587176232ab9634b07fdbed" Nov 24 01:48:39 crc kubenswrapper[4755]: I1124 01:48:39.095720 
4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eefbf29750ea9ae6e546aab5258c38f74ee5d6585587176232ab9634b07fdbed"} err="failed to get container status \"eefbf29750ea9ae6e546aab5258c38f74ee5d6585587176232ab9634b07fdbed\": rpc error: code = NotFound desc = could not find container \"eefbf29750ea9ae6e546aab5258c38f74ee5d6585587176232ab9634b07fdbed\": container with ID starting with eefbf29750ea9ae6e546aab5258c38f74ee5d6585587176232ab9634b07fdbed not found: ID does not exist" Nov 24 01:48:39 crc kubenswrapper[4755]: I1124 01:48:39.095764 4755 scope.go:117] "RemoveContainer" containerID="d316460ca0b3a525d830e95e9a5977446f6bf9fbd21930c115a7f7f8a1acd44c" Nov 24 01:48:39 crc kubenswrapper[4755]: E1124 01:48:39.096570 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d316460ca0b3a525d830e95e9a5977446f6bf9fbd21930c115a7f7f8a1acd44c\": container with ID starting with d316460ca0b3a525d830e95e9a5977446f6bf9fbd21930c115a7f7f8a1acd44c not found: ID does not exist" containerID="d316460ca0b3a525d830e95e9a5977446f6bf9fbd21930c115a7f7f8a1acd44c" Nov 24 01:48:39 crc kubenswrapper[4755]: I1124 01:48:39.096633 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d316460ca0b3a525d830e95e9a5977446f6bf9fbd21930c115a7f7f8a1acd44c"} err="failed to get container status \"d316460ca0b3a525d830e95e9a5977446f6bf9fbd21930c115a7f7f8a1acd44c\": rpc error: code = NotFound desc = could not find container \"d316460ca0b3a525d830e95e9a5977446f6bf9fbd21930c115a7f7f8a1acd44c\": container with ID starting with d316460ca0b3a525d830e95e9a5977446f6bf9fbd21930c115a7f7f8a1acd44c not found: ID does not exist" Nov 24 01:48:39 crc kubenswrapper[4755]: I1124 01:48:39.096663 4755 scope.go:117] "RemoveContainer" containerID="0f0e7e15c03313ede0825e32003bba90442b0ac8b6f8f6a2893ef703ccb64822" Nov 24 01:48:39 crc kubenswrapper[4755]: E1124 01:48:39.097230 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f0e7e15c03313ede0825e32003bba90442b0ac8b6f8f6a2893ef703ccb64822\": container with ID starting with 0f0e7e15c03313ede0825e32003bba90442b0ac8b6f8f6a2893ef703ccb64822 not found: ID does not exist" containerID="0f0e7e15c03313ede0825e32003bba90442b0ac8b6f8f6a2893ef703ccb64822" Nov 24 01:48:39 crc kubenswrapper[4755]: I1124 01:48:39.097282 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f0e7e15c03313ede0825e32003bba90442b0ac8b6f8f6a2893ef703ccb64822"} err="failed to get container status \"0f0e7e15c03313ede0825e32003bba90442b0ac8b6f8f6a2893ef703ccb64822\": rpc error: code = NotFound desc = could not find container \"0f0e7e15c03313ede0825e32003bba90442b0ac8b6f8f6a2893ef703ccb64822\": container with ID starting with 0f0e7e15c03313ede0825e32003bba90442b0ac8b6f8f6a2893ef703ccb64822 not found: ID does not exist" Nov 24 01:48:40 crc kubenswrapper[4755]: I1124 01:48:40.011772 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="36dc1e89-2e6f-479e-806a-75ece4ff32ec" path="/var/lib/kubelet/pods/36dc1e89-2e6f-479e-806a-75ece4ff32ec/volumes" Nov 24 01:49:03 crc kubenswrapper[4755]: I1124 01:49:03.295073 4755 patch_prober.go:28] interesting pod/machine-config-daemon-h8xzm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 01:49:03 crc kubenswrapper[4755]: I1124 01:49:03.295745 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 01:49:09 crc kubenswrapper[4755]: I1124 01:49:09.261203 4755 generic.go:334] "Generic (PLEG): container finished" podID="cff1906b-beb7-4b0f-b20b-c0d155437b90" containerID="fcf070ded24e3200ce2001d089fef5903660d31509059e581d1d876bf6c07709" exitCode=0 Nov 24 01:49:09 crc kubenswrapper[4755]: I1124 01:49:09.261344 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dwg9g" event={"ID":"cff1906b-beb7-4b0f-b20b-c0d155437b90","Type":"ContainerDied","Data":"fcf070ded24e3200ce2001d089fef5903660d31509059e581d1d876bf6c07709"} Nov 24 01:49:10 crc kubenswrapper[4755]: I1124 01:49:10.798814 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dwg9g" Nov 24 01:49:10 crc kubenswrapper[4755]: I1124 01:49:10.988354 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mjz6x\" (UniqueName: \"kubernetes.io/projected/cff1906b-beb7-4b0f-b20b-c0d155437b90-kube-api-access-mjz6x\") pod \"cff1906b-beb7-4b0f-b20b-c0d155437b90\" (UID: \"cff1906b-beb7-4b0f-b20b-c0d155437b90\") " Nov 24 01:49:10 crc kubenswrapper[4755]: I1124 01:49:10.988735 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cff1906b-beb7-4b0f-b20b-c0d155437b90-ssh-key\") pod \"cff1906b-beb7-4b0f-b20b-c0d155437b90\" (UID: \"cff1906b-beb7-4b0f-b20b-c0d155437b90\") " Nov 24 01:49:10 crc kubenswrapper[4755]: I1124 01:49:10.988807 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/cff1906b-beb7-4b0f-b20b-c0d155437b90-libvirt-secret-0\") pod \"cff1906b-beb7-4b0f-b20b-c0d155437b90\" (UID: \"cff1906b-beb7-4b0f-b20b-c0d155437b90\") " Nov 24 01:49:10 crc kubenswrapper[4755]: I1124 01:49:10.988850 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cff1906b-beb7-4b0f-b20b-c0d155437b90-inventory\") pod \"cff1906b-beb7-4b0f-b20b-c0d155437b90\" (UID: \"cff1906b-beb7-4b0f-b20b-c0d155437b90\") " Nov 24 01:49:10 crc kubenswrapper[4755]: I1124 01:49:10.988928 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cff1906b-beb7-4b0f-b20b-c0d155437b90-libvirt-combined-ca-bundle\") pod \"cff1906b-beb7-4b0f-b20b-c0d155437b90\" (UID: \"cff1906b-beb7-4b0f-b20b-c0d155437b90\") " Nov 24 01:49:10 crc kubenswrapper[4755]: I1124 01:49:10.999308 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cff1906b-beb7-4b0f-b20b-c0d155437b90-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "cff1906b-beb7-4b0f-b20b-c0d155437b90" (UID: "cff1906b-beb7-4b0f-b20b-c0d155437b90"). InnerVolumeSpecName "libvirt-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.014940 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cff1906b-beb7-4b0f-b20b-c0d155437b90-kube-api-access-mjz6x" (OuterVolumeSpecName: "kube-api-access-mjz6x") pod "cff1906b-beb7-4b0f-b20b-c0d155437b90" (UID: "cff1906b-beb7-4b0f-b20b-c0d155437b90"). InnerVolumeSpecName "kube-api-access-mjz6x". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.047834 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cff1906b-beb7-4b0f-b20b-c0d155437b90-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "cff1906b-beb7-4b0f-b20b-c0d155437b90" (UID: "cff1906b-beb7-4b0f-b20b-c0d155437b90"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.054298 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cff1906b-beb7-4b0f-b20b-c0d155437b90-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "cff1906b-beb7-4b0f-b20b-c0d155437b90" (UID: "cff1906b-beb7-4b0f-b20b-c0d155437b90"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.056924 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cff1906b-beb7-4b0f-b20b-c0d155437b90-inventory" (OuterVolumeSpecName: "inventory") pod "cff1906b-beb7-4b0f-b20b-c0d155437b90" (UID: "cff1906b-beb7-4b0f-b20b-c0d155437b90"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.093018 4755 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cff1906b-beb7-4b0f-b20b-c0d155437b90-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.093089 4755 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/cff1906b-beb7-4b0f-b20b-c0d155437b90-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.093125 4755 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cff1906b-beb7-4b0f-b20b-c0d155437b90-inventory\") on node \"crc\" DevicePath \"\"" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.093157 4755 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cff1906b-beb7-4b0f-b20b-c0d155437b90-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.093182 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mjz6x\" (UniqueName: \"kubernetes.io/projected/cff1906b-beb7-4b0f-b20b-c0d155437b90-kube-api-access-mjz6x\") on node \"crc\" DevicePath \"\"" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.287395 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dwg9g" event={"ID":"cff1906b-beb7-4b0f-b20b-c0d155437b90","Type":"ContainerDied","Data":"821ea457a9859282e13557f589325ddbfc5c12f5038f8f0023ecd52b81f31a24"} Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.287457 4755 
pod_container_deletor.go:80] "Container not found in pod's containers" containerID="821ea457a9859282e13557f589325ddbfc5c12f5038f8f0023ecd52b81f31a24" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.287780 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-dwg9g" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.388459 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-wx7v6"] Nov 24 01:49:11 crc kubenswrapper[4755]: E1124 01:49:11.388992 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36dc1e89-2e6f-479e-806a-75ece4ff32ec" containerName="extract-content" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.389024 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="36dc1e89-2e6f-479e-806a-75ece4ff32ec" containerName="extract-content" Nov 24 01:49:11 crc kubenswrapper[4755]: E1124 01:49:11.389052 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cff1906b-beb7-4b0f-b20b-c0d155437b90" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.389065 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="cff1906b-beb7-4b0f-b20b-c0d155437b90" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Nov 24 01:49:11 crc kubenswrapper[4755]: E1124 01:49:11.389090 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36dc1e89-2e6f-479e-806a-75ece4ff32ec" containerName="extract-utilities" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.389103 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="36dc1e89-2e6f-479e-806a-75ece4ff32ec" containerName="extract-utilities" Nov 24 01:49:11 crc kubenswrapper[4755]: E1124 01:49:11.389132 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36dc1e89-2e6f-479e-806a-75ece4ff32ec" containerName="registry-server" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.389142 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="36dc1e89-2e6f-479e-806a-75ece4ff32ec" containerName="registry-server" Nov 24 01:49:11 crc kubenswrapper[4755]: E1124 01:49:11.389162 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a29390c0-2bbd-4960-a20d-c3b69f300dc3" containerName="extract-content" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.389172 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="a29390c0-2bbd-4960-a20d-c3b69f300dc3" containerName="extract-content" Nov 24 01:49:11 crc kubenswrapper[4755]: E1124 01:49:11.389197 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a29390c0-2bbd-4960-a20d-c3b69f300dc3" containerName="extract-utilities" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.389207 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="a29390c0-2bbd-4960-a20d-c3b69f300dc3" containerName="extract-utilities" Nov 24 01:49:11 crc kubenswrapper[4755]: E1124 01:49:11.389228 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a29390c0-2bbd-4960-a20d-c3b69f300dc3" containerName="registry-server" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.389238 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="a29390c0-2bbd-4960-a20d-c3b69f300dc3" containerName="registry-server" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.389528 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="36dc1e89-2e6f-479e-806a-75ece4ff32ec" 
containerName="registry-server" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.389560 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="a29390c0-2bbd-4960-a20d-c3b69f300dc3" containerName="registry-server" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.389623 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="cff1906b-beb7-4b0f-b20b-c0d155437b90" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.390638 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-wx7v6" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.393226 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.393680 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-4dz5v" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.393920 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.394109 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.394383 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.394626 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.394904 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.399617 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-wx7v6\" (UID: \"3f6ff548-9e89-4d7c-8a41-d5c769a8d871\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-wx7v6" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.399798 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-wx7v6\" (UID: \"3f6ff548-9e89-4d7c-8a41-d5c769a8d871\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-wx7v6" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.399876 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-wx7v6\" (UID: \"3f6ff548-9e89-4d7c-8a41-d5c769a8d871\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-wx7v6" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.399945 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-nova-migration-ssh-key-0\") pod 
\"nova-edpm-deployment-openstack-edpm-ipam-wx7v6\" (UID: \"3f6ff548-9e89-4d7c-8a41-d5c769a8d871\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-wx7v6" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.400063 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k66xb\" (UniqueName: \"kubernetes.io/projected/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-kube-api-access-k66xb\") pod \"nova-edpm-deployment-openstack-edpm-ipam-wx7v6\" (UID: \"3f6ff548-9e89-4d7c-8a41-d5c769a8d871\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-wx7v6" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.400132 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-wx7v6\" (UID: \"3f6ff548-9e89-4d7c-8a41-d5c769a8d871\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-wx7v6" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.400204 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-wx7v6\" (UID: \"3f6ff548-9e89-4d7c-8a41-d5c769a8d871\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-wx7v6" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.400261 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-wx7v6"] Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.400275 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-wx7v6\" (UID: \"3f6ff548-9e89-4d7c-8a41-d5c769a8d871\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-wx7v6" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.400391 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-wx7v6\" (UID: \"3f6ff548-9e89-4d7c-8a41-d5c769a8d871\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-wx7v6" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.501997 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-wx7v6\" (UID: \"3f6ff548-9e89-4d7c-8a41-d5c769a8d871\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-wx7v6" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.502067 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-wx7v6\" (UID: \"3f6ff548-9e89-4d7c-8a41-d5c769a8d871\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-wx7v6" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 
01:49:11.502186 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k66xb\" (UniqueName: \"kubernetes.io/projected/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-kube-api-access-k66xb\") pod \"nova-edpm-deployment-openstack-edpm-ipam-wx7v6\" (UID: \"3f6ff548-9e89-4d7c-8a41-d5c769a8d871\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-wx7v6" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.502215 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-wx7v6\" (UID: \"3f6ff548-9e89-4d7c-8a41-d5c769a8d871\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-wx7v6" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.502248 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-wx7v6\" (UID: \"3f6ff548-9e89-4d7c-8a41-d5c769a8d871\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-wx7v6" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.502290 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-wx7v6\" (UID: \"3f6ff548-9e89-4d7c-8a41-d5c769a8d871\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-wx7v6" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.502331 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-wx7v6\" (UID: \"3f6ff548-9e89-4d7c-8a41-d5c769a8d871\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-wx7v6" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.502467 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-wx7v6\" (UID: \"3f6ff548-9e89-4d7c-8a41-d5c769a8d871\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-wx7v6" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.502517 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-wx7v6\" (UID: \"3f6ff548-9e89-4d7c-8a41-d5c769a8d871\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-wx7v6" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.504298 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-wx7v6\" (UID: \"3f6ff548-9e89-4d7c-8a41-d5c769a8d871\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-wx7v6" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.507182 4755 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-wx7v6\" (UID: \"3f6ff548-9e89-4d7c-8a41-d5c769a8d871\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-wx7v6" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.507554 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-wx7v6\" (UID: \"3f6ff548-9e89-4d7c-8a41-d5c769a8d871\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-wx7v6" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.507808 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-wx7v6\" (UID: \"3f6ff548-9e89-4d7c-8a41-d5c769a8d871\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-wx7v6" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.508766 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-wx7v6\" (UID: \"3f6ff548-9e89-4d7c-8a41-d5c769a8d871\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-wx7v6" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.510207 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-wx7v6\" (UID: \"3f6ff548-9e89-4d7c-8a41-d5c769a8d871\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-wx7v6" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.510532 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-wx7v6\" (UID: \"3f6ff548-9e89-4d7c-8a41-d5c769a8d871\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-wx7v6" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.519473 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-wx7v6\" (UID: \"3f6ff548-9e89-4d7c-8a41-d5c769a8d871\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-wx7v6" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.528850 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k66xb\" (UniqueName: \"kubernetes.io/projected/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-kube-api-access-k66xb\") pod \"nova-edpm-deployment-openstack-edpm-ipam-wx7v6\" (UID: \"3f6ff548-9e89-4d7c-8a41-d5c769a8d871\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-wx7v6" Nov 24 01:49:11 crc kubenswrapper[4755]: I1124 01:49:11.714874 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-wx7v6" Nov 24 01:49:12 crc kubenswrapper[4755]: I1124 01:49:12.273440 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-wx7v6"] Nov 24 01:49:12 crc kubenswrapper[4755]: I1124 01:49:12.276585 4755 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 24 01:49:12 crc kubenswrapper[4755]: I1124 01:49:12.297202 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-wx7v6" event={"ID":"3f6ff548-9e89-4d7c-8a41-d5c769a8d871","Type":"ContainerStarted","Data":"3bbf2058c8a377f2b28fd04a6e98424b149be7fa89a10b85001e8b1da240615d"} Nov 24 01:49:14 crc kubenswrapper[4755]: I1124 01:49:14.345991 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-wx7v6" event={"ID":"3f6ff548-9e89-4d7c-8a41-d5c769a8d871","Type":"ContainerStarted","Data":"ce2149314788af8c8feaec926f8b3f744eb002c017c63def20e92578571b9b75"} Nov 24 01:49:14 crc kubenswrapper[4755]: I1124 01:49:14.378495 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-wx7v6" podStartSLOduration=2.388545026 podStartE2EDuration="3.378475414s" podCreationTimestamp="2025-11-24 01:49:11 +0000 UTC" firstStartedPulling="2025-11-24 01:49:12.276358154 +0000 UTC m=+2176.962423655" lastFinishedPulling="2025-11-24 01:49:13.266288512 +0000 UTC m=+2177.952354043" observedRunningTime="2025-11-24 01:49:14.378273918 +0000 UTC m=+2179.064339439" watchObservedRunningTime="2025-11-24 01:49:14.378475414 +0000 UTC m=+2179.064540915" Nov 24 01:49:33 crc kubenswrapper[4755]: I1124 01:49:33.295401 4755 patch_prober.go:28] interesting pod/machine-config-daemon-h8xzm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 01:49:33 crc kubenswrapper[4755]: I1124 01:49:33.295981 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 01:50:03 crc kubenswrapper[4755]: I1124 01:50:03.295282 4755 patch_prober.go:28] interesting pod/machine-config-daemon-h8xzm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 01:50:03 crc kubenswrapper[4755]: I1124 01:50:03.296003 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 01:50:03 crc kubenswrapper[4755]: I1124 01:50:03.296089 4755 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" Nov 24 01:50:03 crc kubenswrapper[4755]: I1124 01:50:03.297183 4755 kuberuntime_manager.go:1027] "Message 
for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0aeb2d7c1c1f984667558e3150c104ac9b2367d080d7c585fb7a0be1668b341e"} pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 24 01:50:03 crc kubenswrapper[4755]: I1124 01:50:03.297291 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" containerID="cri-o://0aeb2d7c1c1f984667558e3150c104ac9b2367d080d7c585fb7a0be1668b341e" gracePeriod=600 Nov 24 01:50:03 crc kubenswrapper[4755]: E1124 01:50:03.418745 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:50:04 crc kubenswrapper[4755]: I1124 01:50:04.370889 4755 generic.go:334] "Generic (PLEG): container finished" podID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerID="0aeb2d7c1c1f984667558e3150c104ac9b2367d080d7c585fb7a0be1668b341e" exitCode=0 Nov 24 01:50:04 crc kubenswrapper[4755]: I1124 01:50:04.370983 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" event={"ID":"b1962128-02a0-46c3-82c2-5055c2aed0b9","Type":"ContainerDied","Data":"0aeb2d7c1c1f984667558e3150c104ac9b2367d080d7c585fb7a0be1668b341e"} Nov 24 01:50:04 crc kubenswrapper[4755]: I1124 01:50:04.371062 4755 scope.go:117] "RemoveContainer" containerID="8fe5f5cacaadeb1d6112d56d1f9d970720cdea615fa510a42f2d8a67230c0f60" Nov 24 01:50:04 crc kubenswrapper[4755]: I1124 01:50:04.371689 4755 scope.go:117] "RemoveContainer" containerID="0aeb2d7c1c1f984667558e3150c104ac9b2367d080d7c585fb7a0be1668b341e" Nov 24 01:50:04 crc kubenswrapper[4755]: E1124 01:50:04.371936 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:50:16 crc kubenswrapper[4755]: I1124 01:50:16.996756 4755 scope.go:117] "RemoveContainer" containerID="0aeb2d7c1c1f984667558e3150c104ac9b2367d080d7c585fb7a0be1668b341e" Nov 24 01:50:16 crc kubenswrapper[4755]: E1124 01:50:16.997479 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:50:30 crc kubenswrapper[4755]: I1124 01:50:30.997321 4755 scope.go:117] "RemoveContainer" containerID="0aeb2d7c1c1f984667558e3150c104ac9b2367d080d7c585fb7a0be1668b341e" Nov 24 01:50:30 crc kubenswrapper[4755]: E1124 
01:50:30.998350 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:50:44 crc kubenswrapper[4755]: I1124 01:50:44.996683 4755 scope.go:117] "RemoveContainer" containerID="0aeb2d7c1c1f984667558e3150c104ac9b2367d080d7c585fb7a0be1668b341e" Nov 24 01:50:44 crc kubenswrapper[4755]: E1124 01:50:44.997353 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:50:57 crc kubenswrapper[4755]: I1124 01:50:57.997175 4755 scope.go:117] "RemoveContainer" containerID="0aeb2d7c1c1f984667558e3150c104ac9b2367d080d7c585fb7a0be1668b341e" Nov 24 01:50:57 crc kubenswrapper[4755]: E1124 01:50:57.997921 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:51:11 crc kubenswrapper[4755]: I1124 01:51:11.998211 4755 scope.go:117] "RemoveContainer" containerID="0aeb2d7c1c1f984667558e3150c104ac9b2367d080d7c585fb7a0be1668b341e" Nov 24 01:51:11 crc kubenswrapper[4755]: E1124 01:51:11.999081 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:51:22 crc kubenswrapper[4755]: I1124 01:51:22.997538 4755 scope.go:117] "RemoveContainer" containerID="0aeb2d7c1c1f984667558e3150c104ac9b2367d080d7c585fb7a0be1668b341e" Nov 24 01:51:22 crc kubenswrapper[4755]: E1124 01:51:22.998546 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:51:36 crc kubenswrapper[4755]: I1124 01:51:36.002065 4755 scope.go:117] "RemoveContainer" containerID="0aeb2d7c1c1f984667558e3150c104ac9b2367d080d7c585fb7a0be1668b341e" Nov 24 01:51:36 crc kubenswrapper[4755]: E1124 01:51:36.002986 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:51:46 crc kubenswrapper[4755]: I1124 01:51:46.996714 4755 scope.go:117] "RemoveContainer" containerID="0aeb2d7c1c1f984667558e3150c104ac9b2367d080d7c585fb7a0be1668b341e" Nov 24 01:51:46 crc kubenswrapper[4755]: E1124 01:51:46.997838 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:51:55 crc kubenswrapper[4755]: I1124 01:51:55.082562 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-pbd7f"] Nov 24 01:51:55 crc kubenswrapper[4755]: I1124 01:51:55.084869 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pbd7f" Nov 24 01:51:55 crc kubenswrapper[4755]: I1124 01:51:55.111617 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pbd7f"] Nov 24 01:51:55 crc kubenswrapper[4755]: I1124 01:51:55.217516 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-95qgz\" (UniqueName: \"kubernetes.io/projected/76666aa5-8cc3-4797-ac84-5d921ea5e4e6-kube-api-access-95qgz\") pod \"community-operators-pbd7f\" (UID: \"76666aa5-8cc3-4797-ac84-5d921ea5e4e6\") " pod="openshift-marketplace/community-operators-pbd7f" Nov 24 01:51:55 crc kubenswrapper[4755]: I1124 01:51:55.217951 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/76666aa5-8cc3-4797-ac84-5d921ea5e4e6-utilities\") pod \"community-operators-pbd7f\" (UID: \"76666aa5-8cc3-4797-ac84-5d921ea5e4e6\") " pod="openshift-marketplace/community-operators-pbd7f" Nov 24 01:51:55 crc kubenswrapper[4755]: I1124 01:51:55.218176 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/76666aa5-8cc3-4797-ac84-5d921ea5e4e6-catalog-content\") pod \"community-operators-pbd7f\" (UID: \"76666aa5-8cc3-4797-ac84-5d921ea5e4e6\") " pod="openshift-marketplace/community-operators-pbd7f" Nov 24 01:51:55 crc kubenswrapper[4755]: I1124 01:51:55.319919 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/76666aa5-8cc3-4797-ac84-5d921ea5e4e6-catalog-content\") pod \"community-operators-pbd7f\" (UID: \"76666aa5-8cc3-4797-ac84-5d921ea5e4e6\") " pod="openshift-marketplace/community-operators-pbd7f" Nov 24 01:51:55 crc kubenswrapper[4755]: I1124 01:51:55.320035 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-95qgz\" (UniqueName: \"kubernetes.io/projected/76666aa5-8cc3-4797-ac84-5d921ea5e4e6-kube-api-access-95qgz\") pod \"community-operators-pbd7f\" (UID: \"76666aa5-8cc3-4797-ac84-5d921ea5e4e6\") " pod="openshift-marketplace/community-operators-pbd7f" Nov 24 01:51:55 crc 
kubenswrapper[4755]: I1124 01:51:55.320164 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/76666aa5-8cc3-4797-ac84-5d921ea5e4e6-utilities\") pod \"community-operators-pbd7f\" (UID: \"76666aa5-8cc3-4797-ac84-5d921ea5e4e6\") " pod="openshift-marketplace/community-operators-pbd7f" Nov 24 01:51:55 crc kubenswrapper[4755]: I1124 01:51:55.320758 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/76666aa5-8cc3-4797-ac84-5d921ea5e4e6-utilities\") pod \"community-operators-pbd7f\" (UID: \"76666aa5-8cc3-4797-ac84-5d921ea5e4e6\") " pod="openshift-marketplace/community-operators-pbd7f" Nov 24 01:51:55 crc kubenswrapper[4755]: I1124 01:51:55.320796 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/76666aa5-8cc3-4797-ac84-5d921ea5e4e6-catalog-content\") pod \"community-operators-pbd7f\" (UID: \"76666aa5-8cc3-4797-ac84-5d921ea5e4e6\") " pod="openshift-marketplace/community-operators-pbd7f" Nov 24 01:51:55 crc kubenswrapper[4755]: I1124 01:51:55.343587 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-95qgz\" (UniqueName: \"kubernetes.io/projected/76666aa5-8cc3-4797-ac84-5d921ea5e4e6-kube-api-access-95qgz\") pod \"community-operators-pbd7f\" (UID: \"76666aa5-8cc3-4797-ac84-5d921ea5e4e6\") " pod="openshift-marketplace/community-operators-pbd7f" Nov 24 01:51:55 crc kubenswrapper[4755]: I1124 01:51:55.403973 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pbd7f" Nov 24 01:51:56 crc kubenswrapper[4755]: I1124 01:51:56.007372 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pbd7f"] Nov 24 01:51:56 crc kubenswrapper[4755]: I1124 01:51:56.457847 4755 generic.go:334] "Generic (PLEG): container finished" podID="76666aa5-8cc3-4797-ac84-5d921ea5e4e6" containerID="c20246f88fc222ef4c5f4396434c58e0787a54d8936a23bfcc4c1f32e28dbdcf" exitCode=0 Nov 24 01:51:56 crc kubenswrapper[4755]: I1124 01:51:56.457953 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pbd7f" event={"ID":"76666aa5-8cc3-4797-ac84-5d921ea5e4e6","Type":"ContainerDied","Data":"c20246f88fc222ef4c5f4396434c58e0787a54d8936a23bfcc4c1f32e28dbdcf"} Nov 24 01:51:56 crc kubenswrapper[4755]: I1124 01:51:56.458217 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pbd7f" event={"ID":"76666aa5-8cc3-4797-ac84-5d921ea5e4e6","Type":"ContainerStarted","Data":"7c3f299f9ad92803a0e11d59aaf44aa2bea45e63eeb7d76776f4b587c894aa24"} Nov 24 01:51:57 crc kubenswrapper[4755]: I1124 01:51:57.475258 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pbd7f" event={"ID":"76666aa5-8cc3-4797-ac84-5d921ea5e4e6","Type":"ContainerStarted","Data":"89d8360ef1a713d7d40cc9f1f69f02ecac940c9942f061e0801cff929f96f2dd"} Nov 24 01:51:57 crc kubenswrapper[4755]: I1124 01:51:57.996018 4755 scope.go:117] "RemoveContainer" containerID="0aeb2d7c1c1f984667558e3150c104ac9b2367d080d7c585fb7a0be1668b341e" Nov 24 01:51:57 crc kubenswrapper[4755]: E1124 01:51:57.996274 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:51:58 crc kubenswrapper[4755]: I1124 01:51:58.484971 4755 generic.go:334] "Generic (PLEG): container finished" podID="76666aa5-8cc3-4797-ac84-5d921ea5e4e6" containerID="89d8360ef1a713d7d40cc9f1f69f02ecac940c9942f061e0801cff929f96f2dd" exitCode=0 Nov 24 01:51:58 crc kubenswrapper[4755]: I1124 01:51:58.485014 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pbd7f" event={"ID":"76666aa5-8cc3-4797-ac84-5d921ea5e4e6","Type":"ContainerDied","Data":"89d8360ef1a713d7d40cc9f1f69f02ecac940c9942f061e0801cff929f96f2dd"} Nov 24 01:51:59 crc kubenswrapper[4755]: I1124 01:51:59.494508 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pbd7f" event={"ID":"76666aa5-8cc3-4797-ac84-5d921ea5e4e6","Type":"ContainerStarted","Data":"59fbbe56d7b66d50701b287dba9835b812d7e6f0877aa4c60113623c435fcd5b"} Nov 24 01:52:03 crc kubenswrapper[4755]: I1124 01:52:03.531040 4755 generic.go:334] "Generic (PLEG): container finished" podID="3f6ff548-9e89-4d7c-8a41-d5c769a8d871" containerID="ce2149314788af8c8feaec926f8b3f744eb002c017c63def20e92578571b9b75" exitCode=0 Nov 24 01:52:03 crc kubenswrapper[4755]: I1124 01:52:03.531171 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-wx7v6" event={"ID":"3f6ff548-9e89-4d7c-8a41-d5c769a8d871","Type":"ContainerDied","Data":"ce2149314788af8c8feaec926f8b3f744eb002c017c63def20e92578571b9b75"} Nov 24 01:52:03 crc kubenswrapper[4755]: I1124 01:52:03.552340 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-pbd7f" podStartSLOduration=6.075084958 podStartE2EDuration="8.552314439s" podCreationTimestamp="2025-11-24 01:51:55 +0000 UTC" firstStartedPulling="2025-11-24 01:51:56.459719586 +0000 UTC m=+2341.145785097" lastFinishedPulling="2025-11-24 01:51:58.936949067 +0000 UTC m=+2343.623014578" observedRunningTime="2025-11-24 01:51:59.513761223 +0000 UTC m=+2344.199826754" watchObservedRunningTime="2025-11-24 01:52:03.552314439 +0000 UTC m=+2348.238379960" Nov 24 01:52:04 crc kubenswrapper[4755]: I1124 01:52:04.939021 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-wx7v6" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.097308 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k66xb\" (UniqueName: \"kubernetes.io/projected/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-kube-api-access-k66xb\") pod \"3f6ff548-9e89-4d7c-8a41-d5c769a8d871\" (UID: \"3f6ff548-9e89-4d7c-8a41-d5c769a8d871\") " Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.097377 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-nova-extra-config-0\") pod \"3f6ff548-9e89-4d7c-8a41-d5c769a8d871\" (UID: \"3f6ff548-9e89-4d7c-8a41-d5c769a8d871\") " Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.097405 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-nova-combined-ca-bundle\") pod \"3f6ff548-9e89-4d7c-8a41-d5c769a8d871\" (UID: \"3f6ff548-9e89-4d7c-8a41-d5c769a8d871\") " Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.097424 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-nova-cell1-compute-config-1\") pod \"3f6ff548-9e89-4d7c-8a41-d5c769a8d871\" (UID: \"3f6ff548-9e89-4d7c-8a41-d5c769a8d871\") " Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.097503 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-inventory\") pod \"3f6ff548-9e89-4d7c-8a41-d5c769a8d871\" (UID: \"3f6ff548-9e89-4d7c-8a41-d5c769a8d871\") " Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.097547 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-nova-cell1-compute-config-0\") pod \"3f6ff548-9e89-4d7c-8a41-d5c769a8d871\" (UID: \"3f6ff548-9e89-4d7c-8a41-d5c769a8d871\") " Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.097596 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-nova-migration-ssh-key-1\") pod \"3f6ff548-9e89-4d7c-8a41-d5c769a8d871\" (UID: \"3f6ff548-9e89-4d7c-8a41-d5c769a8d871\") " Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.097628 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-nova-migration-ssh-key-0\") pod \"3f6ff548-9e89-4d7c-8a41-d5c769a8d871\" (UID: \"3f6ff548-9e89-4d7c-8a41-d5c769a8d871\") " Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.097673 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-ssh-key\") pod \"3f6ff548-9e89-4d7c-8a41-d5c769a8d871\" (UID: \"3f6ff548-9e89-4d7c-8a41-d5c769a8d871\") " Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.104268 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "3f6ff548-9e89-4d7c-8a41-d5c769a8d871" (UID: "3f6ff548-9e89-4d7c-8a41-d5c769a8d871"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.119826 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-kube-api-access-k66xb" (OuterVolumeSpecName: "kube-api-access-k66xb") pod "3f6ff548-9e89-4d7c-8a41-d5c769a8d871" (UID: "3f6ff548-9e89-4d7c-8a41-d5c769a8d871"). InnerVolumeSpecName "kube-api-access-k66xb". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.124681 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3f6ff548-9e89-4d7c-8a41-d5c769a8d871" (UID: "3f6ff548-9e89-4d7c-8a41-d5c769a8d871"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.127564 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "3f6ff548-9e89-4d7c-8a41-d5c769a8d871" (UID: "3f6ff548-9e89-4d7c-8a41-d5c769a8d871"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.128507 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "3f6ff548-9e89-4d7c-8a41-d5c769a8d871" (UID: "3f6ff548-9e89-4d7c-8a41-d5c769a8d871"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.131342 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "3f6ff548-9e89-4d7c-8a41-d5c769a8d871" (UID: "3f6ff548-9e89-4d7c-8a41-d5c769a8d871"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.132793 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "3f6ff548-9e89-4d7c-8a41-d5c769a8d871" (UID: "3f6ff548-9e89-4d7c-8a41-d5c769a8d871"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.146691 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-inventory" (OuterVolumeSpecName: "inventory") pod "3f6ff548-9e89-4d7c-8a41-d5c769a8d871" (UID: "3f6ff548-9e89-4d7c-8a41-d5c769a8d871"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.150761 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "3f6ff548-9e89-4d7c-8a41-d5c769a8d871" (UID: "3f6ff548-9e89-4d7c-8a41-d5c769a8d871"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.199970 4755 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.200009 4755 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.200023 4755 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.200035 4755 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-inventory\") on node \"crc\" DevicePath \"\"" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.200047 4755 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.200058 4755 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.200069 4755 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.200080 4755 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.200091 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k66xb\" (UniqueName: \"kubernetes.io/projected/3f6ff548-9e89-4d7c-8a41-d5c769a8d871-kube-api-access-k66xb\") on node \"crc\" DevicePath \"\"" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.404799 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-pbd7f" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.404863 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-pbd7f" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.496001 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/community-operators-pbd7f" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.552885 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-wx7v6" event={"ID":"3f6ff548-9e89-4d7c-8a41-d5c769a8d871","Type":"ContainerDied","Data":"3bbf2058c8a377f2b28fd04a6e98424b149be7fa89a10b85001e8b1da240615d"} Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.552927 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3bbf2058c8a377f2b28fd04a6e98424b149be7fa89a10b85001e8b1da240615d" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.552986 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-wx7v6" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.629417 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-pbd7f" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.654121 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7grgk"] Nov 24 01:52:05 crc kubenswrapper[4755]: E1124 01:52:05.654649 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f6ff548-9e89-4d7c-8a41-d5c769a8d871" containerName="nova-edpm-deployment-openstack-edpm-ipam" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.654675 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f6ff548-9e89-4d7c-8a41-d5c769a8d871" containerName="nova-edpm-deployment-openstack-edpm-ipam" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.654921 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f6ff548-9e89-4d7c-8a41-d5c769a8d871" containerName="nova-edpm-deployment-openstack-edpm-ipam" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.655723 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7grgk" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.657507 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.657666 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-4dz5v" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.658056 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.661254 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.664465 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.672332 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7grgk"] Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.754096 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pbd7f"] Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.814754 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7grgk\" (UID: \"c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7grgk" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.814823 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7grgk\" (UID: \"c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7grgk" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.814860 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7grgk\" (UID: \"c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7grgk" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.814901 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6rblk\" (UniqueName: \"kubernetes.io/projected/c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7-kube-api-access-6rblk\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7grgk\" (UID: \"c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7grgk" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.815166 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7grgk\" (UID: 
\"c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7grgk" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.815388 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7grgk\" (UID: \"c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7grgk" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.815485 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7grgk\" (UID: \"c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7grgk" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.917551 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7grgk\" (UID: \"c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7grgk" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.917654 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7grgk\" (UID: \"c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7grgk" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.917689 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7grgk\" (UID: \"c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7grgk" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.917743 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6rblk\" (UniqueName: \"kubernetes.io/projected/c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7-kube-api-access-6rblk\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7grgk\" (UID: \"c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7grgk" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.917805 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7grgk\" (UID: \"c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7grgk" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.917870 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: 
\"kubernetes.io/secret/c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7grgk\" (UID: \"c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7grgk" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.917909 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7grgk\" (UID: \"c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7grgk" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.922537 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7grgk\" (UID: \"c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7grgk" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.923254 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7grgk\" (UID: \"c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7grgk" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.923595 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7grgk\" (UID: \"c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7grgk" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.923741 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7grgk\" (UID: \"c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7grgk" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.929028 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7grgk\" (UID: \"c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7grgk" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.932825 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7grgk\" (UID: \"c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7grgk" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.934891 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-6rblk\" (UniqueName: \"kubernetes.io/projected/c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7-kube-api-access-6rblk\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7grgk\" (UID: \"c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7grgk" Nov 24 01:52:05 crc kubenswrapper[4755]: I1124 01:52:05.974080 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7grgk" Nov 24 01:52:06 crc kubenswrapper[4755]: I1124 01:52:06.515528 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7grgk"] Nov 24 01:52:06 crc kubenswrapper[4755]: W1124 01:52:06.518161 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc8f52ac0_a9d0_48c2_b1dc_0ebc5bfd4ad7.slice/crio-d7de879fd0572e7718380c9757cb38449acab58b0f5cb284ceea71f3e7ec9780 WatchSource:0}: Error finding container d7de879fd0572e7718380c9757cb38449acab58b0f5cb284ceea71f3e7ec9780: Status 404 returned error can't find the container with id d7de879fd0572e7718380c9757cb38449acab58b0f5cb284ceea71f3e7ec9780 Nov 24 01:52:06 crc kubenswrapper[4755]: I1124 01:52:06.562805 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7grgk" event={"ID":"c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7","Type":"ContainerStarted","Data":"d7de879fd0572e7718380c9757cb38449acab58b0f5cb284ceea71f3e7ec9780"} Nov 24 01:52:07 crc kubenswrapper[4755]: I1124 01:52:07.572938 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7grgk" event={"ID":"c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7","Type":"ContainerStarted","Data":"5996d89017f2f6b4ab480c913e3fdd79fb72b88d6cceb26f894c008032f39154"} Nov 24 01:52:07 crc kubenswrapper[4755]: I1124 01:52:07.573484 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-pbd7f" podUID="76666aa5-8cc3-4797-ac84-5d921ea5e4e6" containerName="registry-server" containerID="cri-o://59fbbe56d7b66d50701b287dba9835b812d7e6f0877aa4c60113623c435fcd5b" gracePeriod=2 Nov 24 01:52:07 crc kubenswrapper[4755]: I1124 01:52:07.595547 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7grgk" podStartSLOduration=1.969350712 podStartE2EDuration="2.595523435s" podCreationTimestamp="2025-11-24 01:52:05 +0000 UTC" firstStartedPulling="2025-11-24 01:52:06.520454402 +0000 UTC m=+2351.206519903" lastFinishedPulling="2025-11-24 01:52:07.146627125 +0000 UTC m=+2351.832692626" observedRunningTime="2025-11-24 01:52:07.593015555 +0000 UTC m=+2352.279081086" watchObservedRunningTime="2025-11-24 01:52:07.595523435 +0000 UTC m=+2352.281588956" Nov 24 01:52:08 crc kubenswrapper[4755]: I1124 01:52:08.048829 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-pbd7f" Nov 24 01:52:08 crc kubenswrapper[4755]: I1124 01:52:08.163623 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/76666aa5-8cc3-4797-ac84-5d921ea5e4e6-utilities\") pod \"76666aa5-8cc3-4797-ac84-5d921ea5e4e6\" (UID: \"76666aa5-8cc3-4797-ac84-5d921ea5e4e6\") " Nov 24 01:52:08 crc kubenswrapper[4755]: I1124 01:52:08.164003 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/76666aa5-8cc3-4797-ac84-5d921ea5e4e6-catalog-content\") pod \"76666aa5-8cc3-4797-ac84-5d921ea5e4e6\" (UID: \"76666aa5-8cc3-4797-ac84-5d921ea5e4e6\") " Nov 24 01:52:08 crc kubenswrapper[4755]: I1124 01:52:08.164295 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-95qgz\" (UniqueName: \"kubernetes.io/projected/76666aa5-8cc3-4797-ac84-5d921ea5e4e6-kube-api-access-95qgz\") pod \"76666aa5-8cc3-4797-ac84-5d921ea5e4e6\" (UID: \"76666aa5-8cc3-4797-ac84-5d921ea5e4e6\") " Nov 24 01:52:08 crc kubenswrapper[4755]: I1124 01:52:08.164638 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/76666aa5-8cc3-4797-ac84-5d921ea5e4e6-utilities" (OuterVolumeSpecName: "utilities") pod "76666aa5-8cc3-4797-ac84-5d921ea5e4e6" (UID: "76666aa5-8cc3-4797-ac84-5d921ea5e4e6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:52:08 crc kubenswrapper[4755]: I1124 01:52:08.165064 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/76666aa5-8cc3-4797-ac84-5d921ea5e4e6-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 01:52:08 crc kubenswrapper[4755]: I1124 01:52:08.181550 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76666aa5-8cc3-4797-ac84-5d921ea5e4e6-kube-api-access-95qgz" (OuterVolumeSpecName: "kube-api-access-95qgz") pod "76666aa5-8cc3-4797-ac84-5d921ea5e4e6" (UID: "76666aa5-8cc3-4797-ac84-5d921ea5e4e6"). InnerVolumeSpecName "kube-api-access-95qgz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:52:08 crc kubenswrapper[4755]: I1124 01:52:08.222979 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/76666aa5-8cc3-4797-ac84-5d921ea5e4e6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "76666aa5-8cc3-4797-ac84-5d921ea5e4e6" (UID: "76666aa5-8cc3-4797-ac84-5d921ea5e4e6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:52:08 crc kubenswrapper[4755]: I1124 01:52:08.267307 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-95qgz\" (UniqueName: \"kubernetes.io/projected/76666aa5-8cc3-4797-ac84-5d921ea5e4e6-kube-api-access-95qgz\") on node \"crc\" DevicePath \"\"" Nov 24 01:52:08 crc kubenswrapper[4755]: I1124 01:52:08.267370 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/76666aa5-8cc3-4797-ac84-5d921ea5e4e6-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 01:52:08 crc kubenswrapper[4755]: I1124 01:52:08.582768 4755 generic.go:334] "Generic (PLEG): container finished" podID="76666aa5-8cc3-4797-ac84-5d921ea5e4e6" containerID="59fbbe56d7b66d50701b287dba9835b812d7e6f0877aa4c60113623c435fcd5b" exitCode=0 Nov 24 01:52:08 crc kubenswrapper[4755]: I1124 01:52:08.582885 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pbd7f" event={"ID":"76666aa5-8cc3-4797-ac84-5d921ea5e4e6","Type":"ContainerDied","Data":"59fbbe56d7b66d50701b287dba9835b812d7e6f0877aa4c60113623c435fcd5b"} Nov 24 01:52:08 crc kubenswrapper[4755]: I1124 01:52:08.582957 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pbd7f" Nov 24 01:52:08 crc kubenswrapper[4755]: I1124 01:52:08.583204 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pbd7f" event={"ID":"76666aa5-8cc3-4797-ac84-5d921ea5e4e6","Type":"ContainerDied","Data":"7c3f299f9ad92803a0e11d59aaf44aa2bea45e63eeb7d76776f4b587c894aa24"} Nov 24 01:52:08 crc kubenswrapper[4755]: I1124 01:52:08.583272 4755 scope.go:117] "RemoveContainer" containerID="59fbbe56d7b66d50701b287dba9835b812d7e6f0877aa4c60113623c435fcd5b" Nov 24 01:52:08 crc kubenswrapper[4755]: I1124 01:52:08.616422 4755 scope.go:117] "RemoveContainer" containerID="89d8360ef1a713d7d40cc9f1f69f02ecac940c9942f061e0801cff929f96f2dd" Nov 24 01:52:08 crc kubenswrapper[4755]: I1124 01:52:08.627792 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pbd7f"] Nov 24 01:52:08 crc kubenswrapper[4755]: I1124 01:52:08.638324 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-pbd7f"] Nov 24 01:52:08 crc kubenswrapper[4755]: I1124 01:52:08.647839 4755 scope.go:117] "RemoveContainer" containerID="c20246f88fc222ef4c5f4396434c58e0787a54d8936a23bfcc4c1f32e28dbdcf" Nov 24 01:52:08 crc kubenswrapper[4755]: I1124 01:52:08.727633 4755 scope.go:117] "RemoveContainer" containerID="59fbbe56d7b66d50701b287dba9835b812d7e6f0877aa4c60113623c435fcd5b" Nov 24 01:52:08 crc kubenswrapper[4755]: E1124 01:52:08.728092 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"59fbbe56d7b66d50701b287dba9835b812d7e6f0877aa4c60113623c435fcd5b\": container with ID starting with 59fbbe56d7b66d50701b287dba9835b812d7e6f0877aa4c60113623c435fcd5b not found: ID does not exist" containerID="59fbbe56d7b66d50701b287dba9835b812d7e6f0877aa4c60113623c435fcd5b" Nov 24 01:52:08 crc kubenswrapper[4755]: I1124 01:52:08.728142 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59fbbe56d7b66d50701b287dba9835b812d7e6f0877aa4c60113623c435fcd5b"} err="failed to get container status 
\"59fbbe56d7b66d50701b287dba9835b812d7e6f0877aa4c60113623c435fcd5b\": rpc error: code = NotFound desc = could not find container \"59fbbe56d7b66d50701b287dba9835b812d7e6f0877aa4c60113623c435fcd5b\": container with ID starting with 59fbbe56d7b66d50701b287dba9835b812d7e6f0877aa4c60113623c435fcd5b not found: ID does not exist" Nov 24 01:52:08 crc kubenswrapper[4755]: I1124 01:52:08.728174 4755 scope.go:117] "RemoveContainer" containerID="89d8360ef1a713d7d40cc9f1f69f02ecac940c9942f061e0801cff929f96f2dd" Nov 24 01:52:08 crc kubenswrapper[4755]: E1124 01:52:08.728560 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"89d8360ef1a713d7d40cc9f1f69f02ecac940c9942f061e0801cff929f96f2dd\": container with ID starting with 89d8360ef1a713d7d40cc9f1f69f02ecac940c9942f061e0801cff929f96f2dd not found: ID does not exist" containerID="89d8360ef1a713d7d40cc9f1f69f02ecac940c9942f061e0801cff929f96f2dd" Nov 24 01:52:08 crc kubenswrapper[4755]: I1124 01:52:08.728596 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"89d8360ef1a713d7d40cc9f1f69f02ecac940c9942f061e0801cff929f96f2dd"} err="failed to get container status \"89d8360ef1a713d7d40cc9f1f69f02ecac940c9942f061e0801cff929f96f2dd\": rpc error: code = NotFound desc = could not find container \"89d8360ef1a713d7d40cc9f1f69f02ecac940c9942f061e0801cff929f96f2dd\": container with ID starting with 89d8360ef1a713d7d40cc9f1f69f02ecac940c9942f061e0801cff929f96f2dd not found: ID does not exist" Nov 24 01:52:08 crc kubenswrapper[4755]: I1124 01:52:08.728630 4755 scope.go:117] "RemoveContainer" containerID="c20246f88fc222ef4c5f4396434c58e0787a54d8936a23bfcc4c1f32e28dbdcf" Nov 24 01:52:08 crc kubenswrapper[4755]: E1124 01:52:08.728913 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c20246f88fc222ef4c5f4396434c58e0787a54d8936a23bfcc4c1f32e28dbdcf\": container with ID starting with c20246f88fc222ef4c5f4396434c58e0787a54d8936a23bfcc4c1f32e28dbdcf not found: ID does not exist" containerID="c20246f88fc222ef4c5f4396434c58e0787a54d8936a23bfcc4c1f32e28dbdcf" Nov 24 01:52:08 crc kubenswrapper[4755]: I1124 01:52:08.728946 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c20246f88fc222ef4c5f4396434c58e0787a54d8936a23bfcc4c1f32e28dbdcf"} err="failed to get container status \"c20246f88fc222ef4c5f4396434c58e0787a54d8936a23bfcc4c1f32e28dbdcf\": rpc error: code = NotFound desc = could not find container \"c20246f88fc222ef4c5f4396434c58e0787a54d8936a23bfcc4c1f32e28dbdcf\": container with ID starting with c20246f88fc222ef4c5f4396434c58e0787a54d8936a23bfcc4c1f32e28dbdcf not found: ID does not exist" Nov 24 01:52:08 crc kubenswrapper[4755]: I1124 01:52:08.997084 4755 scope.go:117] "RemoveContainer" containerID="0aeb2d7c1c1f984667558e3150c104ac9b2367d080d7c585fb7a0be1668b341e" Nov 24 01:52:08 crc kubenswrapper[4755]: E1124 01:52:08.997346 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:52:10 crc kubenswrapper[4755]: I1124 01:52:10.013296 4755 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="76666aa5-8cc3-4797-ac84-5d921ea5e4e6" path="/var/lib/kubelet/pods/76666aa5-8cc3-4797-ac84-5d921ea5e4e6/volumes" Nov 24 01:52:20 crc kubenswrapper[4755]: I1124 01:52:20.997101 4755 scope.go:117] "RemoveContainer" containerID="0aeb2d7c1c1f984667558e3150c104ac9b2367d080d7c585fb7a0be1668b341e" Nov 24 01:52:20 crc kubenswrapper[4755]: E1124 01:52:20.998268 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:52:31 crc kubenswrapper[4755]: I1124 01:52:31.997399 4755 scope.go:117] "RemoveContainer" containerID="0aeb2d7c1c1f984667558e3150c104ac9b2367d080d7c585fb7a0be1668b341e" Nov 24 01:52:31 crc kubenswrapper[4755]: E1124 01:52:31.998318 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:52:45 crc kubenswrapper[4755]: I1124 01:52:45.996582 4755 scope.go:117] "RemoveContainer" containerID="0aeb2d7c1c1f984667558e3150c104ac9b2367d080d7c585fb7a0be1668b341e" Nov 24 01:52:45 crc kubenswrapper[4755]: E1124 01:52:45.997409 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:52:59 crc kubenswrapper[4755]: I1124 01:52:59.997084 4755 scope.go:117] "RemoveContainer" containerID="0aeb2d7c1c1f984667558e3150c104ac9b2367d080d7c585fb7a0be1668b341e" Nov 24 01:52:59 crc kubenswrapper[4755]: E1124 01:52:59.997920 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:53:14 crc kubenswrapper[4755]: I1124 01:53:14.996922 4755 scope.go:117] "RemoveContainer" containerID="0aeb2d7c1c1f984667558e3150c104ac9b2367d080d7c585fb7a0be1668b341e" Nov 24 01:53:14 crc kubenswrapper[4755]: E1124 01:53:14.997757 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 
01:53:29 crc kubenswrapper[4755]: I1124 01:53:29.996519 4755 scope.go:117] "RemoveContainer" containerID="0aeb2d7c1c1f984667558e3150c104ac9b2367d080d7c585fb7a0be1668b341e" Nov 24 01:53:29 crc kubenswrapper[4755]: E1124 01:53:29.997201 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:53:41 crc kubenswrapper[4755]: I1124 01:53:41.996948 4755 scope.go:117] "RemoveContainer" containerID="0aeb2d7c1c1f984667558e3150c104ac9b2367d080d7c585fb7a0be1668b341e" Nov 24 01:53:41 crc kubenswrapper[4755]: E1124 01:53:41.998282 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:53:56 crc kubenswrapper[4755]: I1124 01:53:56.996394 4755 scope.go:117] "RemoveContainer" containerID="0aeb2d7c1c1f984667558e3150c104ac9b2367d080d7c585fb7a0be1668b341e" Nov 24 01:53:56 crc kubenswrapper[4755]: E1124 01:53:56.997278 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:54:08 crc kubenswrapper[4755]: I1124 01:54:08.998639 4755 scope.go:117] "RemoveContainer" containerID="0aeb2d7c1c1f984667558e3150c104ac9b2367d080d7c585fb7a0be1668b341e" Nov 24 01:54:09 crc kubenswrapper[4755]: E1124 01:54:08.999834 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:54:23 crc kubenswrapper[4755]: I1124 01:54:23.997178 4755 scope.go:117] "RemoveContainer" containerID="0aeb2d7c1c1f984667558e3150c104ac9b2367d080d7c585fb7a0be1668b341e" Nov 24 01:54:23 crc kubenswrapper[4755]: E1124 01:54:23.998233 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:54:30 crc kubenswrapper[4755]: I1124 01:54:30.997290 4755 generic.go:334] "Generic (PLEG): container finished" podID="c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7" 
containerID="5996d89017f2f6b4ab480c913e3fdd79fb72b88d6cceb26f894c008032f39154" exitCode=0 Nov 24 01:54:30 crc kubenswrapper[4755]: I1124 01:54:30.997302 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7grgk" event={"ID":"c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7","Type":"ContainerDied","Data":"5996d89017f2f6b4ab480c913e3fdd79fb72b88d6cceb26f894c008032f39154"} Nov 24 01:54:32 crc kubenswrapper[4755]: I1124 01:54:32.399573 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7grgk" Nov 24 01:54:32 crc kubenswrapper[4755]: I1124 01:54:32.541861 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7-ssh-key\") pod \"c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7\" (UID: \"c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7\") " Nov 24 01:54:32 crc kubenswrapper[4755]: I1124 01:54:32.541937 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7-inventory\") pod \"c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7\" (UID: \"c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7\") " Nov 24 01:54:32 crc kubenswrapper[4755]: I1124 01:54:32.542063 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7-ceilometer-compute-config-data-2\") pod \"c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7\" (UID: \"c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7\") " Nov 24 01:54:32 crc kubenswrapper[4755]: I1124 01:54:32.542914 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7-ceilometer-compute-config-data-0\") pod \"c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7\" (UID: \"c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7\") " Nov 24 01:54:32 crc kubenswrapper[4755]: I1124 01:54:32.542955 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6rblk\" (UniqueName: \"kubernetes.io/projected/c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7-kube-api-access-6rblk\") pod \"c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7\" (UID: \"c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7\") " Nov 24 01:54:32 crc kubenswrapper[4755]: I1124 01:54:32.543022 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7-telemetry-combined-ca-bundle\") pod \"c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7\" (UID: \"c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7\") " Nov 24 01:54:32 crc kubenswrapper[4755]: I1124 01:54:32.543076 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7-ceilometer-compute-config-data-1\") pod \"c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7\" (UID: \"c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7\") " Nov 24 01:54:32 crc kubenswrapper[4755]: I1124 01:54:32.550346 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7" 
(UID: "c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:54:32 crc kubenswrapper[4755]: I1124 01:54:32.550992 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7-kube-api-access-6rblk" (OuterVolumeSpecName: "kube-api-access-6rblk") pod "c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7" (UID: "c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7"). InnerVolumeSpecName "kube-api-access-6rblk". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:54:32 crc kubenswrapper[4755]: I1124 01:54:32.575383 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7" (UID: "c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:54:32 crc kubenswrapper[4755]: I1124 01:54:32.580875 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7-inventory" (OuterVolumeSpecName: "inventory") pod "c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7" (UID: "c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:54:32 crc kubenswrapper[4755]: I1124 01:54:32.584029 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7" (UID: "c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:54:32 crc kubenswrapper[4755]: I1124 01:54:32.595028 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7" (UID: "c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:54:32 crc kubenswrapper[4755]: I1124 01:54:32.599009 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7" (UID: "c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7"). InnerVolumeSpecName "ceilometer-compute-config-data-2". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 01:54:32 crc kubenswrapper[4755]: I1124 01:54:32.646102 4755 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 01:54:32 crc kubenswrapper[4755]: I1124 01:54:32.646172 4755 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Nov 24 01:54:32 crc kubenswrapper[4755]: I1124 01:54:32.646192 4755 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 24 01:54:32 crc kubenswrapper[4755]: I1124 01:54:32.646211 4755 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7-inventory\") on node \"crc\" DevicePath \"\"" Nov 24 01:54:32 crc kubenswrapper[4755]: I1124 01:54:32.646229 4755 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Nov 24 01:54:32 crc kubenswrapper[4755]: I1124 01:54:32.646250 4755 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Nov 24 01:54:32 crc kubenswrapper[4755]: I1124 01:54:32.646268 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6rblk\" (UniqueName: \"kubernetes.io/projected/c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7-kube-api-access-6rblk\") on node \"crc\" DevicePath \"\"" Nov 24 01:54:33 crc kubenswrapper[4755]: I1124 01:54:33.022265 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7grgk" event={"ID":"c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7","Type":"ContainerDied","Data":"d7de879fd0572e7718380c9757cb38449acab58b0f5cb284ceea71f3e7ec9780"} Nov 24 01:54:33 crc kubenswrapper[4755]: I1124 01:54:33.022308 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d7de879fd0572e7718380c9757cb38449acab58b0f5cb284ceea71f3e7ec9780" Nov 24 01:54:33 crc kubenswrapper[4755]: I1124 01:54:33.022312 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7grgk" Nov 24 01:54:36 crc kubenswrapper[4755]: I1124 01:54:36.996775 4755 scope.go:117] "RemoveContainer" containerID="0aeb2d7c1c1f984667558e3150c104ac9b2367d080d7c585fb7a0be1668b341e" Nov 24 01:54:36 crc kubenswrapper[4755]: E1124 01:54:36.997745 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:54:52 crc kubenswrapper[4755]: I1124 01:54:52.000723 4755 scope.go:117] "RemoveContainer" containerID="0aeb2d7c1c1f984667558e3150c104ac9b2367d080d7c585fb7a0be1668b341e" Nov 24 01:54:52 crc kubenswrapper[4755]: E1124 01:54:52.001411 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 01:54:59 crc kubenswrapper[4755]: E1124 01:54:59.774705 4755 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.103:45940->38.102.83.103:36035: write tcp 38.102.83.103:45940->38.102.83.103:36035: write: broken pipe Nov 24 01:55:04 crc kubenswrapper[4755]: I1124 01:55:04.996373 4755 scope.go:117] "RemoveContainer" containerID="0aeb2d7c1c1f984667558e3150c104ac9b2367d080d7c585fb7a0be1668b341e" Nov 24 01:55:05 crc kubenswrapper[4755]: I1124 01:55:05.379831 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" event={"ID":"b1962128-02a0-46c3-82c2-5055c2aed0b9","Type":"ContainerStarted","Data":"63caa319a5617588dfc38fc4ec055bba7992a0b755612b0a3d97cdb4e6b2845b"} Nov 24 01:55:18 crc kubenswrapper[4755]: E1124 01:55:18.760991 4755 upgradeaware.go:441] Error proxying data from backend to client: writeto tcp 38.102.83.103:33876->38.102.83.103:36035: read tcp 38.102.83.103:33876->38.102.83.103:36035: read: connection reset by peer Nov 24 01:55:31 crc kubenswrapper[4755]: I1124 01:55:31.165905 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Nov 24 01:55:31 crc kubenswrapper[4755]: E1124 01:55:31.167966 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76666aa5-8cc3-4797-ac84-5d921ea5e4e6" containerName="registry-server" Nov 24 01:55:31 crc kubenswrapper[4755]: I1124 01:55:31.167994 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="76666aa5-8cc3-4797-ac84-5d921ea5e4e6" containerName="registry-server" Nov 24 01:55:31 crc kubenswrapper[4755]: E1124 01:55:31.168063 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76666aa5-8cc3-4797-ac84-5d921ea5e4e6" containerName="extract-content" Nov 24 01:55:31 crc kubenswrapper[4755]: I1124 01:55:31.168079 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="76666aa5-8cc3-4797-ac84-5d921ea5e4e6" containerName="extract-content" Nov 24 01:55:31 crc kubenswrapper[4755]: E1124 01:55:31.168130 4755 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Nov 24 01:55:31 crc kubenswrapper[4755]: I1124 01:55:31.168151 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Nov 24 01:55:31 crc kubenswrapper[4755]: E1124 01:55:31.168171 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76666aa5-8cc3-4797-ac84-5d921ea5e4e6" containerName="extract-utilities" Nov 24 01:55:31 crc kubenswrapper[4755]: I1124 01:55:31.168185 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="76666aa5-8cc3-4797-ac84-5d921ea5e4e6" containerName="extract-utilities" Nov 24 01:55:31 crc kubenswrapper[4755]: I1124 01:55:31.169011 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="76666aa5-8cc3-4797-ac84-5d921ea5e4e6" containerName="registry-server" Nov 24 01:55:31 crc kubenswrapper[4755]: I1124 01:55:31.169060 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Nov 24 01:55:31 crc kubenswrapper[4755]: I1124 01:55:31.170813 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Nov 24 01:55:31 crc kubenswrapper[4755]: I1124 01:55:31.179870 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Nov 24 01:55:31 crc kubenswrapper[4755]: I1124 01:55:31.190160 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Nov 24 01:55:31 crc kubenswrapper[4755]: I1124 01:55:31.190189 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Nov 24 01:55:31 crc kubenswrapper[4755]: I1124 01:55:31.190829 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Nov 24 01:55:31 crc kubenswrapper[4755]: I1124 01:55:31.194390 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-q8kvm" Nov 24 01:55:31 crc kubenswrapper[4755]: I1124 01:55:31.358248 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jv5x6\" (UniqueName: \"kubernetes.io/projected/23d50e60-91da-42c3-8d11-5c22eab88929-kube-api-access-jv5x6\") pod \"tempest-tests-tempest\" (UID: \"23d50e60-91da-42c3-8d11-5c22eab88929\") " pod="openstack/tempest-tests-tempest" Nov 24 01:55:31 crc kubenswrapper[4755]: I1124 01:55:31.358332 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/23d50e60-91da-42c3-8d11-5c22eab88929-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"23d50e60-91da-42c3-8d11-5c22eab88929\") " pod="openstack/tempest-tests-tempest" Nov 24 01:55:31 crc kubenswrapper[4755]: I1124 01:55:31.358371 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/23d50e60-91da-42c3-8d11-5c22eab88929-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"23d50e60-91da-42c3-8d11-5c22eab88929\") " pod="openstack/tempest-tests-tempest" Nov 24 01:55:31 crc kubenswrapper[4755]: I1124 01:55:31.358409 4755 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"tempest-tests-tempest\" (UID: \"23d50e60-91da-42c3-8d11-5c22eab88929\") " pod="openstack/tempest-tests-tempest" Nov 24 01:55:31 crc kubenswrapper[4755]: I1124 01:55:31.358436 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/23d50e60-91da-42c3-8d11-5c22eab88929-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"23d50e60-91da-42c3-8d11-5c22eab88929\") " pod="openstack/tempest-tests-tempest" Nov 24 01:55:31 crc kubenswrapper[4755]: I1124 01:55:31.358480 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/23d50e60-91da-42c3-8d11-5c22eab88929-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"23d50e60-91da-42c3-8d11-5c22eab88929\") " pod="openstack/tempest-tests-tempest" Nov 24 01:55:31 crc kubenswrapper[4755]: I1124 01:55:31.358505 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/23d50e60-91da-42c3-8d11-5c22eab88929-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"23d50e60-91da-42c3-8d11-5c22eab88929\") " pod="openstack/tempest-tests-tempest" Nov 24 01:55:31 crc kubenswrapper[4755]: I1124 01:55:31.358528 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/23d50e60-91da-42c3-8d11-5c22eab88929-config-data\") pod \"tempest-tests-tempest\" (UID: \"23d50e60-91da-42c3-8d11-5c22eab88929\") " pod="openstack/tempest-tests-tempest" Nov 24 01:55:31 crc kubenswrapper[4755]: I1124 01:55:31.358635 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/23d50e60-91da-42c3-8d11-5c22eab88929-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"23d50e60-91da-42c3-8d11-5c22eab88929\") " pod="openstack/tempest-tests-tempest" Nov 24 01:55:31 crc kubenswrapper[4755]: I1124 01:55:31.460718 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/23d50e60-91da-42c3-8d11-5c22eab88929-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"23d50e60-91da-42c3-8d11-5c22eab88929\") " pod="openstack/tempest-tests-tempest" Nov 24 01:55:31 crc kubenswrapper[4755]: I1124 01:55:31.461033 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/23d50e60-91da-42c3-8d11-5c22eab88929-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"23d50e60-91da-42c3-8d11-5c22eab88929\") " pod="openstack/tempest-tests-tempest" Nov 24 01:55:31 crc kubenswrapper[4755]: I1124 01:55:31.461160 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/23d50e60-91da-42c3-8d11-5c22eab88929-config-data\") pod \"tempest-tests-tempest\" (UID: \"23d50e60-91da-42c3-8d11-5c22eab88929\") " pod="openstack/tempest-tests-tempest" Nov 24 01:55:31 crc kubenswrapper[4755]: I1124 01:55:31.461427 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/23d50e60-91da-42c3-8d11-5c22eab88929-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"23d50e60-91da-42c3-8d11-5c22eab88929\") " pod="openstack/tempest-tests-tempest" Nov 24 01:55:31 crc kubenswrapper[4755]: I1124 01:55:31.461661 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jv5x6\" (UniqueName: \"kubernetes.io/projected/23d50e60-91da-42c3-8d11-5c22eab88929-kube-api-access-jv5x6\") pod \"tempest-tests-tempest\" (UID: \"23d50e60-91da-42c3-8d11-5c22eab88929\") " pod="openstack/tempest-tests-tempest" Nov 24 01:55:31 crc kubenswrapper[4755]: I1124 01:55:31.461798 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/23d50e60-91da-42c3-8d11-5c22eab88929-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"23d50e60-91da-42c3-8d11-5c22eab88929\") " pod="openstack/tempest-tests-tempest" Nov 24 01:55:31 crc kubenswrapper[4755]: I1124 01:55:31.461989 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/23d50e60-91da-42c3-8d11-5c22eab88929-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"23d50e60-91da-42c3-8d11-5c22eab88929\") " pod="openstack/tempest-tests-tempest" Nov 24 01:55:31 crc kubenswrapper[4755]: I1124 01:55:31.462127 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"tempest-tests-tempest\" (UID: \"23d50e60-91da-42c3-8d11-5c22eab88929\") " pod="openstack/tempest-tests-tempest" Nov 24 01:55:31 crc kubenswrapper[4755]: I1124 01:55:31.462229 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/23d50e60-91da-42c3-8d11-5c22eab88929-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"23d50e60-91da-42c3-8d11-5c22eab88929\") " pod="openstack/tempest-tests-tempest" Nov 24 01:55:31 crc kubenswrapper[4755]: I1124 01:55:31.462412 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/23d50e60-91da-42c3-8d11-5c22eab88929-config-data\") pod \"tempest-tests-tempest\" (UID: \"23d50e60-91da-42c3-8d11-5c22eab88929\") " pod="openstack/tempest-tests-tempest" Nov 24 01:55:31 crc kubenswrapper[4755]: I1124 01:55:31.462532 4755 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"tempest-tests-tempest\" (UID: \"23d50e60-91da-42c3-8d11-5c22eab88929\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/tempest-tests-tempest" Nov 24 01:55:31 crc kubenswrapper[4755]: I1124 01:55:31.462977 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/23d50e60-91da-42c3-8d11-5c22eab88929-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"23d50e60-91da-42c3-8d11-5c22eab88929\") " pod="openstack/tempest-tests-tempest" Nov 24 01:55:31 crc kubenswrapper[4755]: I1124 01:55:31.463215 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: 
\"kubernetes.io/empty-dir/23d50e60-91da-42c3-8d11-5c22eab88929-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"23d50e60-91da-42c3-8d11-5c22eab88929\") " pod="openstack/tempest-tests-tempest" Nov 24 01:55:31 crc kubenswrapper[4755]: I1124 01:55:31.464397 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/23d50e60-91da-42c3-8d11-5c22eab88929-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"23d50e60-91da-42c3-8d11-5c22eab88929\") " pod="openstack/tempest-tests-tempest" Nov 24 01:55:31 crc kubenswrapper[4755]: I1124 01:55:31.470523 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/23d50e60-91da-42c3-8d11-5c22eab88929-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"23d50e60-91da-42c3-8d11-5c22eab88929\") " pod="openstack/tempest-tests-tempest" Nov 24 01:55:31 crc kubenswrapper[4755]: I1124 01:55:31.470615 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/23d50e60-91da-42c3-8d11-5c22eab88929-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"23d50e60-91da-42c3-8d11-5c22eab88929\") " pod="openstack/tempest-tests-tempest" Nov 24 01:55:31 crc kubenswrapper[4755]: I1124 01:55:31.475899 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/23d50e60-91da-42c3-8d11-5c22eab88929-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"23d50e60-91da-42c3-8d11-5c22eab88929\") " pod="openstack/tempest-tests-tempest" Nov 24 01:55:31 crc kubenswrapper[4755]: I1124 01:55:31.493785 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"tempest-tests-tempest\" (UID: \"23d50e60-91da-42c3-8d11-5c22eab88929\") " pod="openstack/tempest-tests-tempest" Nov 24 01:55:31 crc kubenswrapper[4755]: I1124 01:55:31.498476 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jv5x6\" (UniqueName: \"kubernetes.io/projected/23d50e60-91da-42c3-8d11-5c22eab88929-kube-api-access-jv5x6\") pod \"tempest-tests-tempest\" (UID: \"23d50e60-91da-42c3-8d11-5c22eab88929\") " pod="openstack/tempest-tests-tempest" Nov 24 01:55:31 crc kubenswrapper[4755]: I1124 01:55:31.513081 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Nov 24 01:55:31 crc kubenswrapper[4755]: I1124 01:55:31.952771 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Nov 24 01:55:31 crc kubenswrapper[4755]: I1124 01:55:31.969055 4755 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 24 01:55:32 crc kubenswrapper[4755]: I1124 01:55:32.652797 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"23d50e60-91da-42c3-8d11-5c22eab88929","Type":"ContainerStarted","Data":"997445fcb031ccebc6e22b9caf19e1cc4cf47d9ea3c3c411e6f2c47a3321d1a7"} Nov 24 01:56:00 crc kubenswrapper[4755]: E1124 01:56:00.089521 4755 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified" Nov 24 01:56:00 crc kubenswrapper[4755]: E1124 01:56:00.090404 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:tempest-tests-tempest-tests-runner,Image:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-jv5x6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,Stdi
nOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(23d50e60-91da-42c3-8d11-5c22eab88929): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 24 01:56:00 crc kubenswrapper[4755]: E1124 01:56:00.091766 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" podUID="23d50e60-91da-42c3-8d11-5c22eab88929" Nov 24 01:56:00 crc kubenswrapper[4755]: E1124 01:56:00.905212 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified\\\"\"" pod="openstack/tempest-tests-tempest" podUID="23d50e60-91da-42c3-8d11-5c22eab88929" Nov 24 01:56:12 crc kubenswrapper[4755]: I1124 01:56:12.434557 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Nov 24 01:56:14 crc kubenswrapper[4755]: I1124 01:56:14.032934 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"23d50e60-91da-42c3-8d11-5c22eab88929","Type":"ContainerStarted","Data":"87b3a178f91345ecb7a9925054b60644de8b8680a70e7e8a75b7f1e98581bcdb"} Nov 24 01:56:14 crc kubenswrapper[4755]: I1124 01:56:14.071884 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=3.609835105 podStartE2EDuration="44.071850192s" podCreationTimestamp="2025-11-24 01:55:30 +0000 UTC" firstStartedPulling="2025-11-24 01:55:31.968873872 +0000 UTC m=+2556.654939373" lastFinishedPulling="2025-11-24 01:56:12.430888939 +0000 UTC m=+2597.116954460" observedRunningTime="2025-11-24 01:56:14.059380431 +0000 UTC m=+2598.745445932" watchObservedRunningTime="2025-11-24 01:56:14.071850192 +0000 UTC m=+2598.757915733" Nov 24 01:57:26 crc kubenswrapper[4755]: I1124 01:57:26.149375 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-4q9d6"] Nov 24 01:57:26 crc kubenswrapper[4755]: I1124 01:57:26.153807 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4q9d6" Nov 24 01:57:26 crc kubenswrapper[4755]: I1124 01:57:26.162197 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4q9d6"] Nov 24 01:57:26 crc kubenswrapper[4755]: I1124 01:57:26.235191 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4977e72e-39fd-42d5-9017-4d9391d7392a-catalog-content\") pod \"redhat-operators-4q9d6\" (UID: \"4977e72e-39fd-42d5-9017-4d9391d7392a\") " pod="openshift-marketplace/redhat-operators-4q9d6" Nov 24 01:57:26 crc kubenswrapper[4755]: I1124 01:57:26.235443 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4977e72e-39fd-42d5-9017-4d9391d7392a-utilities\") pod \"redhat-operators-4q9d6\" (UID: \"4977e72e-39fd-42d5-9017-4d9391d7392a\") " pod="openshift-marketplace/redhat-operators-4q9d6" Nov 24 01:57:26 crc kubenswrapper[4755]: I1124 01:57:26.235548 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gcvpn\" (UniqueName: \"kubernetes.io/projected/4977e72e-39fd-42d5-9017-4d9391d7392a-kube-api-access-gcvpn\") pod \"redhat-operators-4q9d6\" (UID: \"4977e72e-39fd-42d5-9017-4d9391d7392a\") " pod="openshift-marketplace/redhat-operators-4q9d6" Nov 24 01:57:26 crc kubenswrapper[4755]: I1124 01:57:26.336859 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gcvpn\" (UniqueName: \"kubernetes.io/projected/4977e72e-39fd-42d5-9017-4d9391d7392a-kube-api-access-gcvpn\") pod \"redhat-operators-4q9d6\" (UID: \"4977e72e-39fd-42d5-9017-4d9391d7392a\") " pod="openshift-marketplace/redhat-operators-4q9d6" Nov 24 01:57:26 crc kubenswrapper[4755]: I1124 01:57:26.337222 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4977e72e-39fd-42d5-9017-4d9391d7392a-catalog-content\") pod \"redhat-operators-4q9d6\" (UID: \"4977e72e-39fd-42d5-9017-4d9391d7392a\") " pod="openshift-marketplace/redhat-operators-4q9d6" Nov 24 01:57:26 crc kubenswrapper[4755]: I1124 01:57:26.337498 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4977e72e-39fd-42d5-9017-4d9391d7392a-utilities\") pod \"redhat-operators-4q9d6\" (UID: \"4977e72e-39fd-42d5-9017-4d9391d7392a\") " pod="openshift-marketplace/redhat-operators-4q9d6" Nov 24 01:57:26 crc kubenswrapper[4755]: I1124 01:57:26.337737 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4977e72e-39fd-42d5-9017-4d9391d7392a-catalog-content\") pod \"redhat-operators-4q9d6\" (UID: \"4977e72e-39fd-42d5-9017-4d9391d7392a\") " pod="openshift-marketplace/redhat-operators-4q9d6" Nov 24 01:57:26 crc kubenswrapper[4755]: I1124 01:57:26.337884 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4977e72e-39fd-42d5-9017-4d9391d7392a-utilities\") pod \"redhat-operators-4q9d6\" (UID: \"4977e72e-39fd-42d5-9017-4d9391d7392a\") " pod="openshift-marketplace/redhat-operators-4q9d6" Nov 24 01:57:26 crc kubenswrapper[4755]: I1124 01:57:26.358201 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-gcvpn\" (UniqueName: \"kubernetes.io/projected/4977e72e-39fd-42d5-9017-4d9391d7392a-kube-api-access-gcvpn\") pod \"redhat-operators-4q9d6\" (UID: \"4977e72e-39fd-42d5-9017-4d9391d7392a\") " pod="openshift-marketplace/redhat-operators-4q9d6" Nov 24 01:57:26 crc kubenswrapper[4755]: I1124 01:57:26.482835 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4q9d6" Nov 24 01:57:26 crc kubenswrapper[4755]: I1124 01:57:26.958453 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4q9d6"] Nov 24 01:57:27 crc kubenswrapper[4755]: I1124 01:57:27.725229 4755 generic.go:334] "Generic (PLEG): container finished" podID="4977e72e-39fd-42d5-9017-4d9391d7392a" containerID="b90e8da5f8d7bcf07b901a448cc2410b0aedae185c043d2beb05beff65ff2ab4" exitCode=0 Nov 24 01:57:27 crc kubenswrapper[4755]: I1124 01:57:27.725327 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4q9d6" event={"ID":"4977e72e-39fd-42d5-9017-4d9391d7392a","Type":"ContainerDied","Data":"b90e8da5f8d7bcf07b901a448cc2410b0aedae185c043d2beb05beff65ff2ab4"} Nov 24 01:57:27 crc kubenswrapper[4755]: I1124 01:57:27.725710 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4q9d6" event={"ID":"4977e72e-39fd-42d5-9017-4d9391d7392a","Type":"ContainerStarted","Data":"5bd5fc7d927458e64bacd6919f6306b0f0c12bce40eb7d5039475b9dde309a2e"} Nov 24 01:57:28 crc kubenswrapper[4755]: I1124 01:57:28.737661 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4q9d6" event={"ID":"4977e72e-39fd-42d5-9017-4d9391d7392a","Type":"ContainerStarted","Data":"8b3be43193545424bf3fcf093991452389d9d1f802831d1a08445600862e7ff6"} Nov 24 01:57:30 crc kubenswrapper[4755]: I1124 01:57:30.758714 4755 generic.go:334] "Generic (PLEG): container finished" podID="4977e72e-39fd-42d5-9017-4d9391d7392a" containerID="8b3be43193545424bf3fcf093991452389d9d1f802831d1a08445600862e7ff6" exitCode=0 Nov 24 01:57:30 crc kubenswrapper[4755]: I1124 01:57:30.759175 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4q9d6" event={"ID":"4977e72e-39fd-42d5-9017-4d9391d7392a","Type":"ContainerDied","Data":"8b3be43193545424bf3fcf093991452389d9d1f802831d1a08445600862e7ff6"} Nov 24 01:57:31 crc kubenswrapper[4755]: I1124 01:57:31.776397 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4q9d6" event={"ID":"4977e72e-39fd-42d5-9017-4d9391d7392a","Type":"ContainerStarted","Data":"c2ad22355dca9b614af7de516fc15e3f4dabf588e609b57628a145b656748a7e"} Nov 24 01:57:31 crc kubenswrapper[4755]: I1124 01:57:31.797595 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-4q9d6" podStartSLOduration=2.333861307 podStartE2EDuration="5.797579596s" podCreationTimestamp="2025-11-24 01:57:26 +0000 UTC" firstStartedPulling="2025-11-24 01:57:27.727660834 +0000 UTC m=+2672.413726335" lastFinishedPulling="2025-11-24 01:57:31.191379113 +0000 UTC m=+2675.877444624" observedRunningTime="2025-11-24 01:57:31.793345927 +0000 UTC m=+2676.479411428" watchObservedRunningTime="2025-11-24 01:57:31.797579596 +0000 UTC m=+2676.483645097" Nov 24 01:57:33 crc kubenswrapper[4755]: I1124 01:57:33.294726 4755 patch_prober.go:28] interesting pod/machine-config-daemon-h8xzm container/machine-config-daemon 
namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 01:57:33 crc kubenswrapper[4755]: I1124 01:57:33.294982 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 01:57:36 crc kubenswrapper[4755]: I1124 01:57:36.483283 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-4q9d6" Nov 24 01:57:36 crc kubenswrapper[4755]: I1124 01:57:36.483860 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-4q9d6" Nov 24 01:57:37 crc kubenswrapper[4755]: I1124 01:57:37.548291 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-4q9d6" podUID="4977e72e-39fd-42d5-9017-4d9391d7392a" containerName="registry-server" probeResult="failure" output=< Nov 24 01:57:37 crc kubenswrapper[4755]: timeout: failed to connect service ":50051" within 1s Nov 24 01:57:37 crc kubenswrapper[4755]: > Nov 24 01:57:46 crc kubenswrapper[4755]: I1124 01:57:46.547027 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-4q9d6" Nov 24 01:57:46 crc kubenswrapper[4755]: I1124 01:57:46.615866 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-4q9d6" Nov 24 01:57:46 crc kubenswrapper[4755]: I1124 01:57:46.782892 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4q9d6"] Nov 24 01:57:47 crc kubenswrapper[4755]: I1124 01:57:47.938399 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-4q9d6" podUID="4977e72e-39fd-42d5-9017-4d9391d7392a" containerName="registry-server" containerID="cri-o://c2ad22355dca9b614af7de516fc15e3f4dabf588e609b57628a145b656748a7e" gracePeriod=2 Nov 24 01:57:48 crc kubenswrapper[4755]: I1124 01:57:48.463785 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4q9d6" Nov 24 01:57:48 crc kubenswrapper[4755]: I1124 01:57:48.593258 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4977e72e-39fd-42d5-9017-4d9391d7392a-catalog-content\") pod \"4977e72e-39fd-42d5-9017-4d9391d7392a\" (UID: \"4977e72e-39fd-42d5-9017-4d9391d7392a\") " Nov 24 01:57:48 crc kubenswrapper[4755]: I1124 01:57:48.593740 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4977e72e-39fd-42d5-9017-4d9391d7392a-utilities\") pod \"4977e72e-39fd-42d5-9017-4d9391d7392a\" (UID: \"4977e72e-39fd-42d5-9017-4d9391d7392a\") " Nov 24 01:57:48 crc kubenswrapper[4755]: I1124 01:57:48.593796 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gcvpn\" (UniqueName: \"kubernetes.io/projected/4977e72e-39fd-42d5-9017-4d9391d7392a-kube-api-access-gcvpn\") pod \"4977e72e-39fd-42d5-9017-4d9391d7392a\" (UID: \"4977e72e-39fd-42d5-9017-4d9391d7392a\") " Nov 24 01:57:48 crc kubenswrapper[4755]: I1124 01:57:48.594779 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4977e72e-39fd-42d5-9017-4d9391d7392a-utilities" (OuterVolumeSpecName: "utilities") pod "4977e72e-39fd-42d5-9017-4d9391d7392a" (UID: "4977e72e-39fd-42d5-9017-4d9391d7392a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:57:48 crc kubenswrapper[4755]: I1124 01:57:48.599942 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4977e72e-39fd-42d5-9017-4d9391d7392a-kube-api-access-gcvpn" (OuterVolumeSpecName: "kube-api-access-gcvpn") pod "4977e72e-39fd-42d5-9017-4d9391d7392a" (UID: "4977e72e-39fd-42d5-9017-4d9391d7392a"). InnerVolumeSpecName "kube-api-access-gcvpn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:57:48 crc kubenswrapper[4755]: I1124 01:57:48.691464 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4977e72e-39fd-42d5-9017-4d9391d7392a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4977e72e-39fd-42d5-9017-4d9391d7392a" (UID: "4977e72e-39fd-42d5-9017-4d9391d7392a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:57:48 crc kubenswrapper[4755]: I1124 01:57:48.697546 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4977e72e-39fd-42d5-9017-4d9391d7392a-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 01:57:48 crc kubenswrapper[4755]: I1124 01:57:48.697597 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4977e72e-39fd-42d5-9017-4d9391d7392a-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 01:57:48 crc kubenswrapper[4755]: I1124 01:57:48.697632 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gcvpn\" (UniqueName: \"kubernetes.io/projected/4977e72e-39fd-42d5-9017-4d9391d7392a-kube-api-access-gcvpn\") on node \"crc\" DevicePath \"\"" Nov 24 01:57:48 crc kubenswrapper[4755]: I1124 01:57:48.950844 4755 generic.go:334] "Generic (PLEG): container finished" podID="4977e72e-39fd-42d5-9017-4d9391d7392a" containerID="c2ad22355dca9b614af7de516fc15e3f4dabf588e609b57628a145b656748a7e" exitCode=0 Nov 24 01:57:48 crc kubenswrapper[4755]: I1124 01:57:48.950934 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4q9d6" Nov 24 01:57:48 crc kubenswrapper[4755]: I1124 01:57:48.950922 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4q9d6" event={"ID":"4977e72e-39fd-42d5-9017-4d9391d7392a","Type":"ContainerDied","Data":"c2ad22355dca9b614af7de516fc15e3f4dabf588e609b57628a145b656748a7e"} Nov 24 01:57:48 crc kubenswrapper[4755]: I1124 01:57:48.951089 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4q9d6" event={"ID":"4977e72e-39fd-42d5-9017-4d9391d7392a","Type":"ContainerDied","Data":"5bd5fc7d927458e64bacd6919f6306b0f0c12bce40eb7d5039475b9dde309a2e"} Nov 24 01:57:48 crc kubenswrapper[4755]: I1124 01:57:48.951110 4755 scope.go:117] "RemoveContainer" containerID="c2ad22355dca9b614af7de516fc15e3f4dabf588e609b57628a145b656748a7e" Nov 24 01:57:48 crc kubenswrapper[4755]: I1124 01:57:48.975777 4755 scope.go:117] "RemoveContainer" containerID="8b3be43193545424bf3fcf093991452389d9d1f802831d1a08445600862e7ff6" Nov 24 01:57:48 crc kubenswrapper[4755]: I1124 01:57:48.986724 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4q9d6"] Nov 24 01:57:48 crc kubenswrapper[4755]: I1124 01:57:48.993332 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-4q9d6"] Nov 24 01:57:49 crc kubenswrapper[4755]: I1124 01:57:49.011344 4755 scope.go:117] "RemoveContainer" containerID="b90e8da5f8d7bcf07b901a448cc2410b0aedae185c043d2beb05beff65ff2ab4" Nov 24 01:57:49 crc kubenswrapper[4755]: I1124 01:57:49.050467 4755 scope.go:117] "RemoveContainer" containerID="c2ad22355dca9b614af7de516fc15e3f4dabf588e609b57628a145b656748a7e" Nov 24 01:57:49 crc kubenswrapper[4755]: E1124 01:57:49.053049 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c2ad22355dca9b614af7de516fc15e3f4dabf588e609b57628a145b656748a7e\": container with ID starting with c2ad22355dca9b614af7de516fc15e3f4dabf588e609b57628a145b656748a7e not found: ID does not exist" containerID="c2ad22355dca9b614af7de516fc15e3f4dabf588e609b57628a145b656748a7e" Nov 24 01:57:49 crc kubenswrapper[4755]: I1124 01:57:49.053093 4755 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2ad22355dca9b614af7de516fc15e3f4dabf588e609b57628a145b656748a7e"} err="failed to get container status \"c2ad22355dca9b614af7de516fc15e3f4dabf588e609b57628a145b656748a7e\": rpc error: code = NotFound desc = could not find container \"c2ad22355dca9b614af7de516fc15e3f4dabf588e609b57628a145b656748a7e\": container with ID starting with c2ad22355dca9b614af7de516fc15e3f4dabf588e609b57628a145b656748a7e not found: ID does not exist" Nov 24 01:57:49 crc kubenswrapper[4755]: I1124 01:57:49.053120 4755 scope.go:117] "RemoveContainer" containerID="8b3be43193545424bf3fcf093991452389d9d1f802831d1a08445600862e7ff6" Nov 24 01:57:49 crc kubenswrapper[4755]: E1124 01:57:49.053499 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8b3be43193545424bf3fcf093991452389d9d1f802831d1a08445600862e7ff6\": container with ID starting with 8b3be43193545424bf3fcf093991452389d9d1f802831d1a08445600862e7ff6 not found: ID does not exist" containerID="8b3be43193545424bf3fcf093991452389d9d1f802831d1a08445600862e7ff6" Nov 24 01:57:49 crc kubenswrapper[4755]: I1124 01:57:49.053518 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b3be43193545424bf3fcf093991452389d9d1f802831d1a08445600862e7ff6"} err="failed to get container status \"8b3be43193545424bf3fcf093991452389d9d1f802831d1a08445600862e7ff6\": rpc error: code = NotFound desc = could not find container \"8b3be43193545424bf3fcf093991452389d9d1f802831d1a08445600862e7ff6\": container with ID starting with 8b3be43193545424bf3fcf093991452389d9d1f802831d1a08445600862e7ff6 not found: ID does not exist" Nov 24 01:57:49 crc kubenswrapper[4755]: I1124 01:57:49.053533 4755 scope.go:117] "RemoveContainer" containerID="b90e8da5f8d7bcf07b901a448cc2410b0aedae185c043d2beb05beff65ff2ab4" Nov 24 01:57:49 crc kubenswrapper[4755]: E1124 01:57:49.053981 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b90e8da5f8d7bcf07b901a448cc2410b0aedae185c043d2beb05beff65ff2ab4\": container with ID starting with b90e8da5f8d7bcf07b901a448cc2410b0aedae185c043d2beb05beff65ff2ab4 not found: ID does not exist" containerID="b90e8da5f8d7bcf07b901a448cc2410b0aedae185c043d2beb05beff65ff2ab4" Nov 24 01:57:49 crc kubenswrapper[4755]: I1124 01:57:49.054031 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b90e8da5f8d7bcf07b901a448cc2410b0aedae185c043d2beb05beff65ff2ab4"} err="failed to get container status \"b90e8da5f8d7bcf07b901a448cc2410b0aedae185c043d2beb05beff65ff2ab4\": rpc error: code = NotFound desc = could not find container \"b90e8da5f8d7bcf07b901a448cc2410b0aedae185c043d2beb05beff65ff2ab4\": container with ID starting with b90e8da5f8d7bcf07b901a448cc2410b0aedae185c043d2beb05beff65ff2ab4 not found: ID does not exist" Nov 24 01:57:50 crc kubenswrapper[4755]: I1124 01:57:50.011875 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4977e72e-39fd-42d5-9017-4d9391d7392a" path="/var/lib/kubelet/pods/4977e72e-39fd-42d5-9017-4d9391d7392a/volumes" Nov 24 01:58:03 crc kubenswrapper[4755]: I1124 01:58:03.295088 4755 patch_prober.go:28] interesting pod/machine-config-daemon-h8xzm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 01:58:03 crc kubenswrapper[4755]: I1124 01:58:03.295563 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 01:58:33 crc kubenswrapper[4755]: I1124 01:58:33.294866 4755 patch_prober.go:28] interesting pod/machine-config-daemon-h8xzm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 01:58:33 crc kubenswrapper[4755]: I1124 01:58:33.295674 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 01:58:33 crc kubenswrapper[4755]: I1124 01:58:33.295738 4755 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" Nov 24 01:58:33 crc kubenswrapper[4755]: I1124 01:58:33.296850 4755 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"63caa319a5617588dfc38fc4ec055bba7992a0b755612b0a3d97cdb4e6b2845b"} pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 24 01:58:33 crc kubenswrapper[4755]: I1124 01:58:33.296943 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" containerID="cri-o://63caa319a5617588dfc38fc4ec055bba7992a0b755612b0a3d97cdb4e6b2845b" gracePeriod=600 Nov 24 01:58:34 crc kubenswrapper[4755]: I1124 01:58:34.388170 4755 generic.go:334] "Generic (PLEG): container finished" podID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerID="63caa319a5617588dfc38fc4ec055bba7992a0b755612b0a3d97cdb4e6b2845b" exitCode=0 Nov 24 01:58:34 crc kubenswrapper[4755]: I1124 01:58:34.388358 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" event={"ID":"b1962128-02a0-46c3-82c2-5055c2aed0b9","Type":"ContainerDied","Data":"63caa319a5617588dfc38fc4ec055bba7992a0b755612b0a3d97cdb4e6b2845b"} Nov 24 01:58:34 crc kubenswrapper[4755]: I1124 01:58:34.388716 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" event={"ID":"b1962128-02a0-46c3-82c2-5055c2aed0b9","Type":"ContainerStarted","Data":"26c00dd7d27738208405e6e7edcb38f31b055da14fe6cc90de3d8ac137ba77f7"} Nov 24 01:58:34 crc kubenswrapper[4755]: I1124 01:58:34.388753 4755 scope.go:117] "RemoveContainer" containerID="0aeb2d7c1c1f984667558e3150c104ac9b2367d080d7c585fb7a0be1668b341e" Nov 24 01:59:44 crc kubenswrapper[4755]: I1124 01:59:44.677727 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-w8kxp"] Nov 24 01:59:44 crc kubenswrapper[4755]: 
E1124 01:59:44.678567 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4977e72e-39fd-42d5-9017-4d9391d7392a" containerName="extract-content" Nov 24 01:59:44 crc kubenswrapper[4755]: I1124 01:59:44.678582 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="4977e72e-39fd-42d5-9017-4d9391d7392a" containerName="extract-content" Nov 24 01:59:44 crc kubenswrapper[4755]: E1124 01:59:44.678620 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4977e72e-39fd-42d5-9017-4d9391d7392a" containerName="registry-server" Nov 24 01:59:44 crc kubenswrapper[4755]: I1124 01:59:44.678626 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="4977e72e-39fd-42d5-9017-4d9391d7392a" containerName="registry-server" Nov 24 01:59:44 crc kubenswrapper[4755]: E1124 01:59:44.678653 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4977e72e-39fd-42d5-9017-4d9391d7392a" containerName="extract-utilities" Nov 24 01:59:44 crc kubenswrapper[4755]: I1124 01:59:44.678660 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="4977e72e-39fd-42d5-9017-4d9391d7392a" containerName="extract-utilities" Nov 24 01:59:44 crc kubenswrapper[4755]: I1124 01:59:44.678878 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="4977e72e-39fd-42d5-9017-4d9391d7392a" containerName="registry-server" Nov 24 01:59:44 crc kubenswrapper[4755]: I1124 01:59:44.680484 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-w8kxp" Nov 24 01:59:44 crc kubenswrapper[4755]: I1124 01:59:44.684529 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-flkr5\" (UniqueName: \"kubernetes.io/projected/a47a15bc-3f2c-4bb7-b22a-31493e800434-kube-api-access-flkr5\") pod \"certified-operators-w8kxp\" (UID: \"a47a15bc-3f2c-4bb7-b22a-31493e800434\") " pod="openshift-marketplace/certified-operators-w8kxp" Nov 24 01:59:44 crc kubenswrapper[4755]: I1124 01:59:44.684578 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a47a15bc-3f2c-4bb7-b22a-31493e800434-utilities\") pod \"certified-operators-w8kxp\" (UID: \"a47a15bc-3f2c-4bb7-b22a-31493e800434\") " pod="openshift-marketplace/certified-operators-w8kxp" Nov 24 01:59:44 crc kubenswrapper[4755]: I1124 01:59:44.684713 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a47a15bc-3f2c-4bb7-b22a-31493e800434-catalog-content\") pod \"certified-operators-w8kxp\" (UID: \"a47a15bc-3f2c-4bb7-b22a-31493e800434\") " pod="openshift-marketplace/certified-operators-w8kxp" Nov 24 01:59:44 crc kubenswrapper[4755]: I1124 01:59:44.697258 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-w8kxp"] Nov 24 01:59:44 crc kubenswrapper[4755]: I1124 01:59:44.787021 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-flkr5\" (UniqueName: \"kubernetes.io/projected/a47a15bc-3f2c-4bb7-b22a-31493e800434-kube-api-access-flkr5\") pod \"certified-operators-w8kxp\" (UID: \"a47a15bc-3f2c-4bb7-b22a-31493e800434\") " pod="openshift-marketplace/certified-operators-w8kxp" Nov 24 01:59:44 crc kubenswrapper[4755]: I1124 01:59:44.787367 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/a47a15bc-3f2c-4bb7-b22a-31493e800434-utilities\") pod \"certified-operators-w8kxp\" (UID: \"a47a15bc-3f2c-4bb7-b22a-31493e800434\") " pod="openshift-marketplace/certified-operators-w8kxp" Nov 24 01:59:44 crc kubenswrapper[4755]: I1124 01:59:44.787440 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a47a15bc-3f2c-4bb7-b22a-31493e800434-catalog-content\") pod \"certified-operators-w8kxp\" (UID: \"a47a15bc-3f2c-4bb7-b22a-31493e800434\") " pod="openshift-marketplace/certified-operators-w8kxp" Nov 24 01:59:44 crc kubenswrapper[4755]: I1124 01:59:44.788023 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a47a15bc-3f2c-4bb7-b22a-31493e800434-catalog-content\") pod \"certified-operators-w8kxp\" (UID: \"a47a15bc-3f2c-4bb7-b22a-31493e800434\") " pod="openshift-marketplace/certified-operators-w8kxp" Nov 24 01:59:44 crc kubenswrapper[4755]: I1124 01:59:44.788397 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a47a15bc-3f2c-4bb7-b22a-31493e800434-utilities\") pod \"certified-operators-w8kxp\" (UID: \"a47a15bc-3f2c-4bb7-b22a-31493e800434\") " pod="openshift-marketplace/certified-operators-w8kxp" Nov 24 01:59:44 crc kubenswrapper[4755]: I1124 01:59:44.817678 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-flkr5\" (UniqueName: \"kubernetes.io/projected/a47a15bc-3f2c-4bb7-b22a-31493e800434-kube-api-access-flkr5\") pod \"certified-operators-w8kxp\" (UID: \"a47a15bc-3f2c-4bb7-b22a-31493e800434\") " pod="openshift-marketplace/certified-operators-w8kxp" Nov 24 01:59:45 crc kubenswrapper[4755]: I1124 01:59:45.003085 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-w8kxp" Nov 24 01:59:45 crc kubenswrapper[4755]: I1124 01:59:45.587879 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-w8kxp"] Nov 24 01:59:46 crc kubenswrapper[4755]: I1124 01:59:46.263711 4755 generic.go:334] "Generic (PLEG): container finished" podID="a47a15bc-3f2c-4bb7-b22a-31493e800434" containerID="9355c86e9a0d9fd599025973c35d3838c5f08b5319ef484d0fee381a12703a0b" exitCode=0 Nov 24 01:59:46 crc kubenswrapper[4755]: I1124 01:59:46.263794 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-w8kxp" event={"ID":"a47a15bc-3f2c-4bb7-b22a-31493e800434","Type":"ContainerDied","Data":"9355c86e9a0d9fd599025973c35d3838c5f08b5319ef484d0fee381a12703a0b"} Nov 24 01:59:46 crc kubenswrapper[4755]: I1124 01:59:46.264018 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-w8kxp" event={"ID":"a47a15bc-3f2c-4bb7-b22a-31493e800434","Type":"ContainerStarted","Data":"9cfb53d3fd3b1bfdb7d86f5d4bda0357a29977a6c56cb191c4247f6a2f786955"} Nov 24 01:59:47 crc kubenswrapper[4755]: I1124 01:59:47.277346 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-w8kxp" event={"ID":"a47a15bc-3f2c-4bb7-b22a-31493e800434","Type":"ContainerStarted","Data":"a737a2f62e2ec8ed158fba50cb2fa83ddca7b7156b3a01710d822ae03fab13c1"} Nov 24 01:59:48 crc kubenswrapper[4755]: I1124 01:59:48.289416 4755 generic.go:334] "Generic (PLEG): container finished" podID="a47a15bc-3f2c-4bb7-b22a-31493e800434" containerID="a737a2f62e2ec8ed158fba50cb2fa83ddca7b7156b3a01710d822ae03fab13c1" exitCode=0 Nov 24 01:59:48 crc kubenswrapper[4755]: I1124 01:59:48.289526 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-w8kxp" event={"ID":"a47a15bc-3f2c-4bb7-b22a-31493e800434","Type":"ContainerDied","Data":"a737a2f62e2ec8ed158fba50cb2fa83ddca7b7156b3a01710d822ae03fab13c1"} Nov 24 01:59:49 crc kubenswrapper[4755]: I1124 01:59:49.300035 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-w8kxp" event={"ID":"a47a15bc-3f2c-4bb7-b22a-31493e800434","Type":"ContainerStarted","Data":"c5753921750a76c3056908fa5c15f158ab2043881e31fb262f7cd7c83e733edb"} Nov 24 01:59:49 crc kubenswrapper[4755]: I1124 01:59:49.324950 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-w8kxp" podStartSLOduration=2.924156241 podStartE2EDuration="5.324923554s" podCreationTimestamp="2025-11-24 01:59:44 +0000 UTC" firstStartedPulling="2025-11-24 01:59:46.265728125 +0000 UTC m=+2810.951793646" lastFinishedPulling="2025-11-24 01:59:48.666495437 +0000 UTC m=+2813.352560959" observedRunningTime="2025-11-24 01:59:49.319353998 +0000 UTC m=+2814.005419499" watchObservedRunningTime="2025-11-24 01:59:49.324923554 +0000 UTC m=+2814.010989045" Nov 24 01:59:55 crc kubenswrapper[4755]: I1124 01:59:55.003530 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-w8kxp" Nov 24 01:59:55 crc kubenswrapper[4755]: I1124 01:59:55.004081 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-w8kxp" Nov 24 01:59:55 crc kubenswrapper[4755]: I1124 01:59:55.058630 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/certified-operators-w8kxp" Nov 24 01:59:55 crc kubenswrapper[4755]: I1124 01:59:55.443320 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-w8kxp" Nov 24 01:59:55 crc kubenswrapper[4755]: I1124 01:59:55.499245 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-w8kxp"] Nov 24 01:59:57 crc kubenswrapper[4755]: I1124 01:59:57.395109 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-w8kxp" podUID="a47a15bc-3f2c-4bb7-b22a-31493e800434" containerName="registry-server" containerID="cri-o://c5753921750a76c3056908fa5c15f158ab2043881e31fb262f7cd7c83e733edb" gracePeriod=2 Nov 24 01:59:58 crc kubenswrapper[4755]: I1124 01:59:58.405849 4755 generic.go:334] "Generic (PLEG): container finished" podID="a47a15bc-3f2c-4bb7-b22a-31493e800434" containerID="c5753921750a76c3056908fa5c15f158ab2043881e31fb262f7cd7c83e733edb" exitCode=0 Nov 24 01:59:58 crc kubenswrapper[4755]: I1124 01:59:58.405927 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-w8kxp" event={"ID":"a47a15bc-3f2c-4bb7-b22a-31493e800434","Type":"ContainerDied","Data":"c5753921750a76c3056908fa5c15f158ab2043881e31fb262f7cd7c83e733edb"} Nov 24 01:59:58 crc kubenswrapper[4755]: I1124 01:59:58.406310 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-w8kxp" event={"ID":"a47a15bc-3f2c-4bb7-b22a-31493e800434","Type":"ContainerDied","Data":"9cfb53d3fd3b1bfdb7d86f5d4bda0357a29977a6c56cb191c4247f6a2f786955"} Nov 24 01:59:58 crc kubenswrapper[4755]: I1124 01:59:58.406331 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9cfb53d3fd3b1bfdb7d86f5d4bda0357a29977a6c56cb191c4247f6a2f786955" Nov 24 01:59:58 crc kubenswrapper[4755]: I1124 01:59:58.490268 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-w8kxp" Nov 24 01:59:58 crc kubenswrapper[4755]: I1124 01:59:58.680502 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a47a15bc-3f2c-4bb7-b22a-31493e800434-catalog-content\") pod \"a47a15bc-3f2c-4bb7-b22a-31493e800434\" (UID: \"a47a15bc-3f2c-4bb7-b22a-31493e800434\") " Nov 24 01:59:58 crc kubenswrapper[4755]: I1124 01:59:58.680574 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a47a15bc-3f2c-4bb7-b22a-31493e800434-utilities\") pod \"a47a15bc-3f2c-4bb7-b22a-31493e800434\" (UID: \"a47a15bc-3f2c-4bb7-b22a-31493e800434\") " Nov 24 01:59:58 crc kubenswrapper[4755]: I1124 01:59:58.680636 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-flkr5\" (UniqueName: \"kubernetes.io/projected/a47a15bc-3f2c-4bb7-b22a-31493e800434-kube-api-access-flkr5\") pod \"a47a15bc-3f2c-4bb7-b22a-31493e800434\" (UID: \"a47a15bc-3f2c-4bb7-b22a-31493e800434\") " Nov 24 01:59:58 crc kubenswrapper[4755]: I1124 01:59:58.681574 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a47a15bc-3f2c-4bb7-b22a-31493e800434-utilities" (OuterVolumeSpecName: "utilities") pod "a47a15bc-3f2c-4bb7-b22a-31493e800434" (UID: "a47a15bc-3f2c-4bb7-b22a-31493e800434"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:59:58 crc kubenswrapper[4755]: I1124 01:59:58.685930 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a47a15bc-3f2c-4bb7-b22a-31493e800434-kube-api-access-flkr5" (OuterVolumeSpecName: "kube-api-access-flkr5") pod "a47a15bc-3f2c-4bb7-b22a-31493e800434" (UID: "a47a15bc-3f2c-4bb7-b22a-31493e800434"). InnerVolumeSpecName "kube-api-access-flkr5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 01:59:58 crc kubenswrapper[4755]: I1124 01:59:58.726701 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a47a15bc-3f2c-4bb7-b22a-31493e800434-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a47a15bc-3f2c-4bb7-b22a-31493e800434" (UID: "a47a15bc-3f2c-4bb7-b22a-31493e800434"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 01:59:58 crc kubenswrapper[4755]: I1124 01:59:58.783482 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a47a15bc-3f2c-4bb7-b22a-31493e800434-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 01:59:58 crc kubenswrapper[4755]: I1124 01:59:58.783519 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a47a15bc-3f2c-4bb7-b22a-31493e800434-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 01:59:58 crc kubenswrapper[4755]: I1124 01:59:58.783529 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-flkr5\" (UniqueName: \"kubernetes.io/projected/a47a15bc-3f2c-4bb7-b22a-31493e800434-kube-api-access-flkr5\") on node \"crc\" DevicePath \"\"" Nov 24 01:59:59 crc kubenswrapper[4755]: I1124 01:59:59.415363 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-w8kxp" Nov 24 01:59:59 crc kubenswrapper[4755]: I1124 01:59:59.447941 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-w8kxp"] Nov 24 01:59:59 crc kubenswrapper[4755]: I1124 01:59:59.455073 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-w8kxp"] Nov 24 02:00:00 crc kubenswrapper[4755]: I1124 02:00:00.009126 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a47a15bc-3f2c-4bb7-b22a-31493e800434" path="/var/lib/kubelet/pods/a47a15bc-3f2c-4bb7-b22a-31493e800434/volumes" Nov 24 02:00:00 crc kubenswrapper[4755]: I1124 02:00:00.152180 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29399160-mvrxl"] Nov 24 02:00:00 crc kubenswrapper[4755]: E1124 02:00:00.152941 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a47a15bc-3f2c-4bb7-b22a-31493e800434" containerName="extract-utilities" Nov 24 02:00:00 crc kubenswrapper[4755]: I1124 02:00:00.152957 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="a47a15bc-3f2c-4bb7-b22a-31493e800434" containerName="extract-utilities" Nov 24 02:00:00 crc kubenswrapper[4755]: E1124 02:00:00.153002 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a47a15bc-3f2c-4bb7-b22a-31493e800434" containerName="registry-server" Nov 24 02:00:00 crc kubenswrapper[4755]: I1124 02:00:00.153015 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="a47a15bc-3f2c-4bb7-b22a-31493e800434" containerName="registry-server" Nov 24 02:00:00 crc kubenswrapper[4755]: E1124 02:00:00.153044 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a47a15bc-3f2c-4bb7-b22a-31493e800434" containerName="extract-content" Nov 24 02:00:00 crc kubenswrapper[4755]: I1124 02:00:00.153053 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="a47a15bc-3f2c-4bb7-b22a-31493e800434" containerName="extract-content" Nov 24 02:00:00 crc kubenswrapper[4755]: I1124 02:00:00.153219 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="a47a15bc-3f2c-4bb7-b22a-31493e800434" containerName="registry-server" Nov 24 02:00:00 crc kubenswrapper[4755]: I1124 02:00:00.153843 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29399160-mvrxl" Nov 24 02:00:00 crc kubenswrapper[4755]: I1124 02:00:00.155540 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 24 02:00:00 crc kubenswrapper[4755]: I1124 02:00:00.155991 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 24 02:00:00 crc kubenswrapper[4755]: I1124 02:00:00.167293 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29399160-mvrxl"] Nov 24 02:00:00 crc kubenswrapper[4755]: I1124 02:00:00.309018 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8612b138-6643-4025-8d13-a27cb04d326f-config-volume\") pod \"collect-profiles-29399160-mvrxl\" (UID: \"8612b138-6643-4025-8d13-a27cb04d326f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399160-mvrxl" Nov 24 02:00:00 crc kubenswrapper[4755]: I1124 02:00:00.309113 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8612b138-6643-4025-8d13-a27cb04d326f-secret-volume\") pod \"collect-profiles-29399160-mvrxl\" (UID: \"8612b138-6643-4025-8d13-a27cb04d326f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399160-mvrxl" Nov 24 02:00:00 crc kubenswrapper[4755]: I1124 02:00:00.309196 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qp24k\" (UniqueName: \"kubernetes.io/projected/8612b138-6643-4025-8d13-a27cb04d326f-kube-api-access-qp24k\") pod \"collect-profiles-29399160-mvrxl\" (UID: \"8612b138-6643-4025-8d13-a27cb04d326f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399160-mvrxl" Nov 24 02:00:00 crc kubenswrapper[4755]: I1124 02:00:00.411274 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8612b138-6643-4025-8d13-a27cb04d326f-secret-volume\") pod \"collect-profiles-29399160-mvrxl\" (UID: \"8612b138-6643-4025-8d13-a27cb04d326f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399160-mvrxl" Nov 24 02:00:00 crc kubenswrapper[4755]: I1124 02:00:00.411389 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qp24k\" (UniqueName: \"kubernetes.io/projected/8612b138-6643-4025-8d13-a27cb04d326f-kube-api-access-qp24k\") pod \"collect-profiles-29399160-mvrxl\" (UID: \"8612b138-6643-4025-8d13-a27cb04d326f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399160-mvrxl" Nov 24 02:00:00 crc kubenswrapper[4755]: I1124 02:00:00.411572 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8612b138-6643-4025-8d13-a27cb04d326f-config-volume\") pod \"collect-profiles-29399160-mvrxl\" (UID: \"8612b138-6643-4025-8d13-a27cb04d326f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399160-mvrxl" Nov 24 02:00:00 crc kubenswrapper[4755]: I1124 02:00:00.416887 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8612b138-6643-4025-8d13-a27cb04d326f-config-volume\") pod 
\"collect-profiles-29399160-mvrxl\" (UID: \"8612b138-6643-4025-8d13-a27cb04d326f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399160-mvrxl" Nov 24 02:00:00 crc kubenswrapper[4755]: I1124 02:00:00.423969 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8612b138-6643-4025-8d13-a27cb04d326f-secret-volume\") pod \"collect-profiles-29399160-mvrxl\" (UID: \"8612b138-6643-4025-8d13-a27cb04d326f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399160-mvrxl" Nov 24 02:00:00 crc kubenswrapper[4755]: I1124 02:00:00.440512 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qp24k\" (UniqueName: \"kubernetes.io/projected/8612b138-6643-4025-8d13-a27cb04d326f-kube-api-access-qp24k\") pod \"collect-profiles-29399160-mvrxl\" (UID: \"8612b138-6643-4025-8d13-a27cb04d326f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399160-mvrxl" Nov 24 02:00:00 crc kubenswrapper[4755]: I1124 02:00:00.476138 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29399160-mvrxl" Nov 24 02:00:00 crc kubenswrapper[4755]: I1124 02:00:00.955916 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29399160-mvrxl"] Nov 24 02:00:01 crc kubenswrapper[4755]: I1124 02:00:01.436192 4755 generic.go:334] "Generic (PLEG): container finished" podID="8612b138-6643-4025-8d13-a27cb04d326f" containerID="d1f63e190bfd0085b098ff7cbe6e2e1cf39d71cfc765d89439fe7fa97a7a59f8" exitCode=0 Nov 24 02:00:01 crc kubenswrapper[4755]: I1124 02:00:01.436253 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29399160-mvrxl" event={"ID":"8612b138-6643-4025-8d13-a27cb04d326f","Type":"ContainerDied","Data":"d1f63e190bfd0085b098ff7cbe6e2e1cf39d71cfc765d89439fe7fa97a7a59f8"} Nov 24 02:00:01 crc kubenswrapper[4755]: I1124 02:00:01.436547 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29399160-mvrxl" event={"ID":"8612b138-6643-4025-8d13-a27cb04d326f","Type":"ContainerStarted","Data":"a6e05f94d0afaedbe970bdf840457075ead566172ee8e3a8cfcf162005393251"} Nov 24 02:00:02 crc kubenswrapper[4755]: I1124 02:00:02.872396 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29399160-mvrxl" Nov 24 02:00:03 crc kubenswrapper[4755]: I1124 02:00:03.067540 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8612b138-6643-4025-8d13-a27cb04d326f-config-volume\") pod \"8612b138-6643-4025-8d13-a27cb04d326f\" (UID: \"8612b138-6643-4025-8d13-a27cb04d326f\") " Nov 24 02:00:03 crc kubenswrapper[4755]: I1124 02:00:03.068028 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qp24k\" (UniqueName: \"kubernetes.io/projected/8612b138-6643-4025-8d13-a27cb04d326f-kube-api-access-qp24k\") pod \"8612b138-6643-4025-8d13-a27cb04d326f\" (UID: \"8612b138-6643-4025-8d13-a27cb04d326f\") " Nov 24 02:00:03 crc kubenswrapper[4755]: I1124 02:00:03.068073 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8612b138-6643-4025-8d13-a27cb04d326f-secret-volume\") pod \"8612b138-6643-4025-8d13-a27cb04d326f\" (UID: \"8612b138-6643-4025-8d13-a27cb04d326f\") " Nov 24 02:00:03 crc kubenswrapper[4755]: I1124 02:00:03.068406 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8612b138-6643-4025-8d13-a27cb04d326f-config-volume" (OuterVolumeSpecName: "config-volume") pod "8612b138-6643-4025-8d13-a27cb04d326f" (UID: "8612b138-6643-4025-8d13-a27cb04d326f"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 02:00:03 crc kubenswrapper[4755]: I1124 02:00:03.073332 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8612b138-6643-4025-8d13-a27cb04d326f-kube-api-access-qp24k" (OuterVolumeSpecName: "kube-api-access-qp24k") pod "8612b138-6643-4025-8d13-a27cb04d326f" (UID: "8612b138-6643-4025-8d13-a27cb04d326f"). InnerVolumeSpecName "kube-api-access-qp24k". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 02:00:03 crc kubenswrapper[4755]: I1124 02:00:03.075786 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8612b138-6643-4025-8d13-a27cb04d326f-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "8612b138-6643-4025-8d13-a27cb04d326f" (UID: "8612b138-6643-4025-8d13-a27cb04d326f"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 02:00:03 crc kubenswrapper[4755]: I1124 02:00:03.170657 4755 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8612b138-6643-4025-8d13-a27cb04d326f-config-volume\") on node \"crc\" DevicePath \"\"" Nov 24 02:00:03 crc kubenswrapper[4755]: I1124 02:00:03.170701 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qp24k\" (UniqueName: \"kubernetes.io/projected/8612b138-6643-4025-8d13-a27cb04d326f-kube-api-access-qp24k\") on node \"crc\" DevicePath \"\"" Nov 24 02:00:03 crc kubenswrapper[4755]: I1124 02:00:03.170714 4755 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8612b138-6643-4025-8d13-a27cb04d326f-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 24 02:00:03 crc kubenswrapper[4755]: I1124 02:00:03.455702 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29399160-mvrxl" event={"ID":"8612b138-6643-4025-8d13-a27cb04d326f","Type":"ContainerDied","Data":"a6e05f94d0afaedbe970bdf840457075ead566172ee8e3a8cfcf162005393251"} Nov 24 02:00:03 crc kubenswrapper[4755]: I1124 02:00:03.455758 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a6e05f94d0afaedbe970bdf840457075ead566172ee8e3a8cfcf162005393251" Nov 24 02:00:03 crc kubenswrapper[4755]: I1124 02:00:03.455788 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29399160-mvrxl" Nov 24 02:00:03 crc kubenswrapper[4755]: I1124 02:00:03.943130 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29399115-rt4z5"] Nov 24 02:00:03 crc kubenswrapper[4755]: I1124 02:00:03.951155 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29399115-rt4z5"] Nov 24 02:00:04 crc kubenswrapper[4755]: I1124 02:00:04.011249 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e6623318-d2a9-4015-b310-96a7506f61f9" path="/var/lib/kubelet/pods/e6623318-d2a9-4015-b310-96a7506f61f9/volumes" Nov 24 02:00:04 crc kubenswrapper[4755]: I1124 02:00:04.414240 4755 scope.go:117] "RemoveContainer" containerID="1b41606d191efd72dfabdf8a9615d0ad8b4816b30272819a227c28f1e9382fa3" Nov 24 02:00:33 crc kubenswrapper[4755]: I1124 02:00:33.294967 4755 patch_prober.go:28] interesting pod/machine-config-daemon-h8xzm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 02:00:33 crc kubenswrapper[4755]: I1124 02:00:33.295686 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 02:01:00 crc kubenswrapper[4755]: I1124 02:01:00.151400 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29399161-hl9nq"] Nov 24 02:01:00 crc kubenswrapper[4755]: E1124 02:01:00.152247 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8612b138-6643-4025-8d13-a27cb04d326f" 
containerName="collect-profiles" Nov 24 02:01:00 crc kubenswrapper[4755]: I1124 02:01:00.152260 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="8612b138-6643-4025-8d13-a27cb04d326f" containerName="collect-profiles" Nov 24 02:01:00 crc kubenswrapper[4755]: I1124 02:01:00.152436 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="8612b138-6643-4025-8d13-a27cb04d326f" containerName="collect-profiles" Nov 24 02:01:00 crc kubenswrapper[4755]: I1124 02:01:00.153134 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29399161-hl9nq" Nov 24 02:01:00 crc kubenswrapper[4755]: I1124 02:01:00.170070 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29399161-hl9nq"] Nov 24 02:01:00 crc kubenswrapper[4755]: I1124 02:01:00.198339 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97f32270-c319-4ef7-9784-1a63c16a0164-config-data\") pod \"keystone-cron-29399161-hl9nq\" (UID: \"97f32270-c319-4ef7-9784-1a63c16a0164\") " pod="openstack/keystone-cron-29399161-hl9nq" Nov 24 02:01:00 crc kubenswrapper[4755]: I1124 02:01:00.198391 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wt6rt\" (UniqueName: \"kubernetes.io/projected/97f32270-c319-4ef7-9784-1a63c16a0164-kube-api-access-wt6rt\") pod \"keystone-cron-29399161-hl9nq\" (UID: \"97f32270-c319-4ef7-9784-1a63c16a0164\") " pod="openstack/keystone-cron-29399161-hl9nq" Nov 24 02:01:00 crc kubenswrapper[4755]: I1124 02:01:00.198710 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/97f32270-c319-4ef7-9784-1a63c16a0164-fernet-keys\") pod \"keystone-cron-29399161-hl9nq\" (UID: \"97f32270-c319-4ef7-9784-1a63c16a0164\") " pod="openstack/keystone-cron-29399161-hl9nq" Nov 24 02:01:00 crc kubenswrapper[4755]: I1124 02:01:00.198776 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97f32270-c319-4ef7-9784-1a63c16a0164-combined-ca-bundle\") pod \"keystone-cron-29399161-hl9nq\" (UID: \"97f32270-c319-4ef7-9784-1a63c16a0164\") " pod="openstack/keystone-cron-29399161-hl9nq" Nov 24 02:01:00 crc kubenswrapper[4755]: I1124 02:01:00.300502 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/97f32270-c319-4ef7-9784-1a63c16a0164-fernet-keys\") pod \"keystone-cron-29399161-hl9nq\" (UID: \"97f32270-c319-4ef7-9784-1a63c16a0164\") " pod="openstack/keystone-cron-29399161-hl9nq" Nov 24 02:01:00 crc kubenswrapper[4755]: I1124 02:01:00.300589 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97f32270-c319-4ef7-9784-1a63c16a0164-combined-ca-bundle\") pod \"keystone-cron-29399161-hl9nq\" (UID: \"97f32270-c319-4ef7-9784-1a63c16a0164\") " pod="openstack/keystone-cron-29399161-hl9nq" Nov 24 02:01:00 crc kubenswrapper[4755]: I1124 02:01:00.300878 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97f32270-c319-4ef7-9784-1a63c16a0164-config-data\") pod \"keystone-cron-29399161-hl9nq\" (UID: \"97f32270-c319-4ef7-9784-1a63c16a0164\") " 
pod="openstack/keystone-cron-29399161-hl9nq" Nov 24 02:01:00 crc kubenswrapper[4755]: I1124 02:01:00.300922 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wt6rt\" (UniqueName: \"kubernetes.io/projected/97f32270-c319-4ef7-9784-1a63c16a0164-kube-api-access-wt6rt\") pod \"keystone-cron-29399161-hl9nq\" (UID: \"97f32270-c319-4ef7-9784-1a63c16a0164\") " pod="openstack/keystone-cron-29399161-hl9nq" Nov 24 02:01:00 crc kubenswrapper[4755]: I1124 02:01:00.311576 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/97f32270-c319-4ef7-9784-1a63c16a0164-fernet-keys\") pod \"keystone-cron-29399161-hl9nq\" (UID: \"97f32270-c319-4ef7-9784-1a63c16a0164\") " pod="openstack/keystone-cron-29399161-hl9nq" Nov 24 02:01:00 crc kubenswrapper[4755]: I1124 02:01:00.312106 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97f32270-c319-4ef7-9784-1a63c16a0164-config-data\") pod \"keystone-cron-29399161-hl9nq\" (UID: \"97f32270-c319-4ef7-9784-1a63c16a0164\") " pod="openstack/keystone-cron-29399161-hl9nq" Nov 24 02:01:00 crc kubenswrapper[4755]: I1124 02:01:00.312313 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97f32270-c319-4ef7-9784-1a63c16a0164-combined-ca-bundle\") pod \"keystone-cron-29399161-hl9nq\" (UID: \"97f32270-c319-4ef7-9784-1a63c16a0164\") " pod="openstack/keystone-cron-29399161-hl9nq" Nov 24 02:01:00 crc kubenswrapper[4755]: I1124 02:01:00.320518 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wt6rt\" (UniqueName: \"kubernetes.io/projected/97f32270-c319-4ef7-9784-1a63c16a0164-kube-api-access-wt6rt\") pod \"keystone-cron-29399161-hl9nq\" (UID: \"97f32270-c319-4ef7-9784-1a63c16a0164\") " pod="openstack/keystone-cron-29399161-hl9nq" Nov 24 02:01:00 crc kubenswrapper[4755]: I1124 02:01:00.471924 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29399161-hl9nq" Nov 24 02:01:00 crc kubenswrapper[4755]: I1124 02:01:00.974746 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29399161-hl9nq"] Nov 24 02:01:01 crc kubenswrapper[4755]: I1124 02:01:01.031350 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29399161-hl9nq" event={"ID":"97f32270-c319-4ef7-9784-1a63c16a0164","Type":"ContainerStarted","Data":"35f5816145c375acb84c874bbf18df35c65ff175dd1c45751e810742b4605680"} Nov 24 02:01:02 crc kubenswrapper[4755]: I1124 02:01:02.044974 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29399161-hl9nq" event={"ID":"97f32270-c319-4ef7-9784-1a63c16a0164","Type":"ContainerStarted","Data":"8fbc8b6d384061445557f7dcfde7a46825284da9d8fc026d2b8d0e8bf7a9ab4f"} Nov 24 02:01:02 crc kubenswrapper[4755]: I1124 02:01:02.072887 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29399161-hl9nq" podStartSLOduration=2.072860746 podStartE2EDuration="2.072860746s" podCreationTimestamp="2025-11-24 02:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 02:01:02.067817194 +0000 UTC m=+2886.753882695" watchObservedRunningTime="2025-11-24 02:01:02.072860746 +0000 UTC m=+2886.758926247" Nov 24 02:01:03 crc kubenswrapper[4755]: I1124 02:01:03.295393 4755 patch_prober.go:28] interesting pod/machine-config-daemon-h8xzm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 02:01:03 crc kubenswrapper[4755]: I1124 02:01:03.296242 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 02:01:04 crc kubenswrapper[4755]: I1124 02:01:04.064484 4755 generic.go:334] "Generic (PLEG): container finished" podID="97f32270-c319-4ef7-9784-1a63c16a0164" containerID="8fbc8b6d384061445557f7dcfde7a46825284da9d8fc026d2b8d0e8bf7a9ab4f" exitCode=0 Nov 24 02:01:04 crc kubenswrapper[4755]: I1124 02:01:04.064634 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29399161-hl9nq" event={"ID":"97f32270-c319-4ef7-9784-1a63c16a0164","Type":"ContainerDied","Data":"8fbc8b6d384061445557f7dcfde7a46825284da9d8fc026d2b8d0e8bf7a9ab4f"} Nov 24 02:01:05 crc kubenswrapper[4755]: I1124 02:01:05.462076 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29399161-hl9nq" Nov 24 02:01:05 crc kubenswrapper[4755]: I1124 02:01:05.512844 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97f32270-c319-4ef7-9784-1a63c16a0164-config-data\") pod \"97f32270-c319-4ef7-9784-1a63c16a0164\" (UID: \"97f32270-c319-4ef7-9784-1a63c16a0164\") " Nov 24 02:01:05 crc kubenswrapper[4755]: I1124 02:01:05.512913 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wt6rt\" (UniqueName: \"kubernetes.io/projected/97f32270-c319-4ef7-9784-1a63c16a0164-kube-api-access-wt6rt\") pod \"97f32270-c319-4ef7-9784-1a63c16a0164\" (UID: \"97f32270-c319-4ef7-9784-1a63c16a0164\") " Nov 24 02:01:05 crc kubenswrapper[4755]: I1124 02:01:05.513056 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97f32270-c319-4ef7-9784-1a63c16a0164-combined-ca-bundle\") pod \"97f32270-c319-4ef7-9784-1a63c16a0164\" (UID: \"97f32270-c319-4ef7-9784-1a63c16a0164\") " Nov 24 02:01:05 crc kubenswrapper[4755]: I1124 02:01:05.513687 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/97f32270-c319-4ef7-9784-1a63c16a0164-fernet-keys\") pod \"97f32270-c319-4ef7-9784-1a63c16a0164\" (UID: \"97f32270-c319-4ef7-9784-1a63c16a0164\") " Nov 24 02:01:05 crc kubenswrapper[4755]: I1124 02:01:05.518661 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97f32270-c319-4ef7-9784-1a63c16a0164-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "97f32270-c319-4ef7-9784-1a63c16a0164" (UID: "97f32270-c319-4ef7-9784-1a63c16a0164"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 02:01:05 crc kubenswrapper[4755]: I1124 02:01:05.527789 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/97f32270-c319-4ef7-9784-1a63c16a0164-kube-api-access-wt6rt" (OuterVolumeSpecName: "kube-api-access-wt6rt") pod "97f32270-c319-4ef7-9784-1a63c16a0164" (UID: "97f32270-c319-4ef7-9784-1a63c16a0164"). InnerVolumeSpecName "kube-api-access-wt6rt". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 02:01:05 crc kubenswrapper[4755]: I1124 02:01:05.544647 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97f32270-c319-4ef7-9784-1a63c16a0164-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "97f32270-c319-4ef7-9784-1a63c16a0164" (UID: "97f32270-c319-4ef7-9784-1a63c16a0164"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 02:01:05 crc kubenswrapper[4755]: I1124 02:01:05.574890 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97f32270-c319-4ef7-9784-1a63c16a0164-config-data" (OuterVolumeSpecName: "config-data") pod "97f32270-c319-4ef7-9784-1a63c16a0164" (UID: "97f32270-c319-4ef7-9784-1a63c16a0164"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 02:01:05 crc kubenswrapper[4755]: I1124 02:01:05.616125 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97f32270-c319-4ef7-9784-1a63c16a0164-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 02:01:05 crc kubenswrapper[4755]: I1124 02:01:05.616160 4755 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/97f32270-c319-4ef7-9784-1a63c16a0164-fernet-keys\") on node \"crc\" DevicePath \"\"" Nov 24 02:01:05 crc kubenswrapper[4755]: I1124 02:01:05.616169 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97f32270-c319-4ef7-9784-1a63c16a0164-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 02:01:05 crc kubenswrapper[4755]: I1124 02:01:05.616179 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wt6rt\" (UniqueName: \"kubernetes.io/projected/97f32270-c319-4ef7-9784-1a63c16a0164-kube-api-access-wt6rt\") on node \"crc\" DevicePath \"\"" Nov 24 02:01:06 crc kubenswrapper[4755]: I1124 02:01:06.088244 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29399161-hl9nq" event={"ID":"97f32270-c319-4ef7-9784-1a63c16a0164","Type":"ContainerDied","Data":"35f5816145c375acb84c874bbf18df35c65ff175dd1c45751e810742b4605680"} Nov 24 02:01:06 crc kubenswrapper[4755]: I1124 02:01:06.088326 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="35f5816145c375acb84c874bbf18df35c65ff175dd1c45751e810742b4605680" Nov 24 02:01:06 crc kubenswrapper[4755]: I1124 02:01:06.088355 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29399161-hl9nq" Nov 24 02:01:33 crc kubenswrapper[4755]: I1124 02:01:33.294844 4755 patch_prober.go:28] interesting pod/machine-config-daemon-h8xzm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 02:01:33 crc kubenswrapper[4755]: I1124 02:01:33.295659 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 02:01:33 crc kubenswrapper[4755]: I1124 02:01:33.295709 4755 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" Nov 24 02:01:33 crc kubenswrapper[4755]: I1124 02:01:33.296357 4755 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"26c00dd7d27738208405e6e7edcb38f31b055da14fe6cc90de3d8ac137ba77f7"} pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 24 02:01:33 crc kubenswrapper[4755]: I1124 02:01:33.296447 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" 
containerID="cri-o://26c00dd7d27738208405e6e7edcb38f31b055da14fe6cc90de3d8ac137ba77f7" gracePeriod=600 Nov 24 02:01:33 crc kubenswrapper[4755]: E1124 02:01:33.420618 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:01:34 crc kubenswrapper[4755]: I1124 02:01:34.360840 4755 generic.go:334] "Generic (PLEG): container finished" podID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerID="26c00dd7d27738208405e6e7edcb38f31b055da14fe6cc90de3d8ac137ba77f7" exitCode=0 Nov 24 02:01:34 crc kubenswrapper[4755]: I1124 02:01:34.360876 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" event={"ID":"b1962128-02a0-46c3-82c2-5055c2aed0b9","Type":"ContainerDied","Data":"26c00dd7d27738208405e6e7edcb38f31b055da14fe6cc90de3d8ac137ba77f7"} Nov 24 02:01:34 crc kubenswrapper[4755]: I1124 02:01:34.361348 4755 scope.go:117] "RemoveContainer" containerID="63caa319a5617588dfc38fc4ec055bba7992a0b755612b0a3d97cdb4e6b2845b" Nov 24 02:01:34 crc kubenswrapper[4755]: I1124 02:01:34.362095 4755 scope.go:117] "RemoveContainer" containerID="26c00dd7d27738208405e6e7edcb38f31b055da14fe6cc90de3d8ac137ba77f7" Nov 24 02:01:34 crc kubenswrapper[4755]: E1124 02:01:34.362401 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:01:48 crc kubenswrapper[4755]: I1124 02:01:48.997288 4755 scope.go:117] "RemoveContainer" containerID="26c00dd7d27738208405e6e7edcb38f31b055da14fe6cc90de3d8ac137ba77f7" Nov 24 02:01:48 crc kubenswrapper[4755]: E1124 02:01:48.998248 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:01:59 crc kubenswrapper[4755]: I1124 02:01:59.996435 4755 scope.go:117] "RemoveContainer" containerID="26c00dd7d27738208405e6e7edcb38f31b055da14fe6cc90de3d8ac137ba77f7" Nov 24 02:01:59 crc kubenswrapper[4755]: E1124 02:01:59.997180 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:02:14 crc kubenswrapper[4755]: I1124 02:02:14.996781 4755 scope.go:117] "RemoveContainer" 
containerID="26c00dd7d27738208405e6e7edcb38f31b055da14fe6cc90de3d8ac137ba77f7" Nov 24 02:02:14 crc kubenswrapper[4755]: E1124 02:02:14.997536 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:02:26 crc kubenswrapper[4755]: I1124 02:02:26.010453 4755 scope.go:117] "RemoveContainer" containerID="26c00dd7d27738208405e6e7edcb38f31b055da14fe6cc90de3d8ac137ba77f7" Nov 24 02:02:26 crc kubenswrapper[4755]: E1124 02:02:26.011234 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:02:40 crc kubenswrapper[4755]: I1124 02:02:40.997537 4755 scope.go:117] "RemoveContainer" containerID="26c00dd7d27738208405e6e7edcb38f31b055da14fe6cc90de3d8ac137ba77f7" Nov 24 02:02:40 crc kubenswrapper[4755]: E1124 02:02:40.998677 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:02:52 crc kubenswrapper[4755]: I1124 02:02:52.996636 4755 scope.go:117] "RemoveContainer" containerID="26c00dd7d27738208405e6e7edcb38f31b055da14fe6cc90de3d8ac137ba77f7" Nov 24 02:02:52 crc kubenswrapper[4755]: E1124 02:02:52.997362 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:03:06 crc kubenswrapper[4755]: I1124 02:03:06.026980 4755 scope.go:117] "RemoveContainer" containerID="26c00dd7d27738208405e6e7edcb38f31b055da14fe6cc90de3d8ac137ba77f7" Nov 24 02:03:06 crc kubenswrapper[4755]: E1124 02:03:06.028974 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:03:19 crc kubenswrapper[4755]: I1124 02:03:19.998630 4755 scope.go:117] "RemoveContainer" containerID="26c00dd7d27738208405e6e7edcb38f31b055da14fe6cc90de3d8ac137ba77f7" Nov 24 02:03:20 crc kubenswrapper[4755]: E1124 02:03:19.999352 4755 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:03:34 crc kubenswrapper[4755]: I1124 02:03:34.997112 4755 scope.go:117] "RemoveContainer" containerID="26c00dd7d27738208405e6e7edcb38f31b055da14fe6cc90de3d8ac137ba77f7" Nov 24 02:03:34 crc kubenswrapper[4755]: E1124 02:03:34.997818 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:03:48 crc kubenswrapper[4755]: I1124 02:03:48.996731 4755 scope.go:117] "RemoveContainer" containerID="26c00dd7d27738208405e6e7edcb38f31b055da14fe6cc90de3d8ac137ba77f7" Nov 24 02:03:48 crc kubenswrapper[4755]: E1124 02:03:48.997454 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:03:59 crc kubenswrapper[4755]: I1124 02:03:59.996639 4755 scope.go:117] "RemoveContainer" containerID="26c00dd7d27738208405e6e7edcb38f31b055da14fe6cc90de3d8ac137ba77f7" Nov 24 02:03:59 crc kubenswrapper[4755]: E1124 02:03:59.997358 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:04:06 crc kubenswrapper[4755]: I1124 02:04:06.116530 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-4j2hj"] Nov 24 02:04:06 crc kubenswrapper[4755]: E1124 02:04:06.117514 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97f32270-c319-4ef7-9784-1a63c16a0164" containerName="keystone-cron" Nov 24 02:04:06 crc kubenswrapper[4755]: I1124 02:04:06.117531 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="97f32270-c319-4ef7-9784-1a63c16a0164" containerName="keystone-cron" Nov 24 02:04:06 crc kubenswrapper[4755]: I1124 02:04:06.117837 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="97f32270-c319-4ef7-9784-1a63c16a0164" containerName="keystone-cron" Nov 24 02:04:06 crc kubenswrapper[4755]: I1124 02:04:06.119379 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4j2hj" Nov 24 02:04:06 crc kubenswrapper[4755]: I1124 02:04:06.129996 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4j2hj"] Nov 24 02:04:06 crc kubenswrapper[4755]: I1124 02:04:06.153895 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3cb5bb09-a715-464a-9314-28e1310d1f2e-utilities\") pod \"redhat-marketplace-4j2hj\" (UID: \"3cb5bb09-a715-464a-9314-28e1310d1f2e\") " pod="openshift-marketplace/redhat-marketplace-4j2hj" Nov 24 02:04:06 crc kubenswrapper[4755]: I1124 02:04:06.153982 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3cb5bb09-a715-464a-9314-28e1310d1f2e-catalog-content\") pod \"redhat-marketplace-4j2hj\" (UID: \"3cb5bb09-a715-464a-9314-28e1310d1f2e\") " pod="openshift-marketplace/redhat-marketplace-4j2hj" Nov 24 02:04:06 crc kubenswrapper[4755]: I1124 02:04:06.154021 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nnq7l\" (UniqueName: \"kubernetes.io/projected/3cb5bb09-a715-464a-9314-28e1310d1f2e-kube-api-access-nnq7l\") pod \"redhat-marketplace-4j2hj\" (UID: \"3cb5bb09-a715-464a-9314-28e1310d1f2e\") " pod="openshift-marketplace/redhat-marketplace-4j2hj" Nov 24 02:04:06 crc kubenswrapper[4755]: I1124 02:04:06.256082 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3cb5bb09-a715-464a-9314-28e1310d1f2e-utilities\") pod \"redhat-marketplace-4j2hj\" (UID: \"3cb5bb09-a715-464a-9314-28e1310d1f2e\") " pod="openshift-marketplace/redhat-marketplace-4j2hj" Nov 24 02:04:06 crc kubenswrapper[4755]: I1124 02:04:06.256461 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3cb5bb09-a715-464a-9314-28e1310d1f2e-catalog-content\") pod \"redhat-marketplace-4j2hj\" (UID: \"3cb5bb09-a715-464a-9314-28e1310d1f2e\") " pod="openshift-marketplace/redhat-marketplace-4j2hj" Nov 24 02:04:06 crc kubenswrapper[4755]: I1124 02:04:06.256644 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3cb5bb09-a715-464a-9314-28e1310d1f2e-utilities\") pod \"redhat-marketplace-4j2hj\" (UID: \"3cb5bb09-a715-464a-9314-28e1310d1f2e\") " pod="openshift-marketplace/redhat-marketplace-4j2hj" Nov 24 02:04:06 crc kubenswrapper[4755]: I1124 02:04:06.256663 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nnq7l\" (UniqueName: \"kubernetes.io/projected/3cb5bb09-a715-464a-9314-28e1310d1f2e-kube-api-access-nnq7l\") pod \"redhat-marketplace-4j2hj\" (UID: \"3cb5bb09-a715-464a-9314-28e1310d1f2e\") " pod="openshift-marketplace/redhat-marketplace-4j2hj" Nov 24 02:04:06 crc kubenswrapper[4755]: I1124 02:04:06.256952 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3cb5bb09-a715-464a-9314-28e1310d1f2e-catalog-content\") pod \"redhat-marketplace-4j2hj\" (UID: \"3cb5bb09-a715-464a-9314-28e1310d1f2e\") " pod="openshift-marketplace/redhat-marketplace-4j2hj" Nov 24 02:04:06 crc kubenswrapper[4755]: I1124 02:04:06.277921 4755 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-nnq7l\" (UniqueName: \"kubernetes.io/projected/3cb5bb09-a715-464a-9314-28e1310d1f2e-kube-api-access-nnq7l\") pod \"redhat-marketplace-4j2hj\" (UID: \"3cb5bb09-a715-464a-9314-28e1310d1f2e\") " pod="openshift-marketplace/redhat-marketplace-4j2hj" Nov 24 02:04:06 crc kubenswrapper[4755]: I1124 02:04:06.314949 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-rdgmh"] Nov 24 02:04:06 crc kubenswrapper[4755]: I1124 02:04:06.330805 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rdgmh"] Nov 24 02:04:06 crc kubenswrapper[4755]: I1124 02:04:06.331136 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rdgmh" Nov 24 02:04:06 crc kubenswrapper[4755]: I1124 02:04:06.360007 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ddc94ef-cc64-4281-bab5-087645764186-catalog-content\") pod \"community-operators-rdgmh\" (UID: \"0ddc94ef-cc64-4281-bab5-087645764186\") " pod="openshift-marketplace/community-operators-rdgmh" Nov 24 02:04:06 crc kubenswrapper[4755]: I1124 02:04:06.360299 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ddc94ef-cc64-4281-bab5-087645764186-utilities\") pod \"community-operators-rdgmh\" (UID: \"0ddc94ef-cc64-4281-bab5-087645764186\") " pod="openshift-marketplace/community-operators-rdgmh" Nov 24 02:04:06 crc kubenswrapper[4755]: I1124 02:04:06.360452 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2fgt2\" (UniqueName: \"kubernetes.io/projected/0ddc94ef-cc64-4281-bab5-087645764186-kube-api-access-2fgt2\") pod \"community-operators-rdgmh\" (UID: \"0ddc94ef-cc64-4281-bab5-087645764186\") " pod="openshift-marketplace/community-operators-rdgmh" Nov 24 02:04:06 crc kubenswrapper[4755]: I1124 02:04:06.437571 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4j2hj" Nov 24 02:04:06 crc kubenswrapper[4755]: I1124 02:04:06.461578 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ddc94ef-cc64-4281-bab5-087645764186-catalog-content\") pod \"community-operators-rdgmh\" (UID: \"0ddc94ef-cc64-4281-bab5-087645764186\") " pod="openshift-marketplace/community-operators-rdgmh" Nov 24 02:04:06 crc kubenswrapper[4755]: I1124 02:04:06.461917 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ddc94ef-cc64-4281-bab5-087645764186-utilities\") pod \"community-operators-rdgmh\" (UID: \"0ddc94ef-cc64-4281-bab5-087645764186\") " pod="openshift-marketplace/community-operators-rdgmh" Nov 24 02:04:06 crc kubenswrapper[4755]: I1124 02:04:06.462276 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2fgt2\" (UniqueName: \"kubernetes.io/projected/0ddc94ef-cc64-4281-bab5-087645764186-kube-api-access-2fgt2\") pod \"community-operators-rdgmh\" (UID: \"0ddc94ef-cc64-4281-bab5-087645764186\") " pod="openshift-marketplace/community-operators-rdgmh" Nov 24 02:04:06 crc kubenswrapper[4755]: I1124 02:04:06.462383 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ddc94ef-cc64-4281-bab5-087645764186-catalog-content\") pod \"community-operators-rdgmh\" (UID: \"0ddc94ef-cc64-4281-bab5-087645764186\") " pod="openshift-marketplace/community-operators-rdgmh" Nov 24 02:04:06 crc kubenswrapper[4755]: I1124 02:04:06.462455 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ddc94ef-cc64-4281-bab5-087645764186-utilities\") pod \"community-operators-rdgmh\" (UID: \"0ddc94ef-cc64-4281-bab5-087645764186\") " pod="openshift-marketplace/community-operators-rdgmh" Nov 24 02:04:06 crc kubenswrapper[4755]: I1124 02:04:06.487956 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2fgt2\" (UniqueName: \"kubernetes.io/projected/0ddc94ef-cc64-4281-bab5-087645764186-kube-api-access-2fgt2\") pod \"community-operators-rdgmh\" (UID: \"0ddc94ef-cc64-4281-bab5-087645764186\") " pod="openshift-marketplace/community-operators-rdgmh" Nov 24 02:04:06 crc kubenswrapper[4755]: I1124 02:04:06.656437 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rdgmh" Nov 24 02:04:06 crc kubenswrapper[4755]: I1124 02:04:06.951259 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4j2hj"] Nov 24 02:04:07 crc kubenswrapper[4755]: I1124 02:04:07.022794 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4j2hj" event={"ID":"3cb5bb09-a715-464a-9314-28e1310d1f2e","Type":"ContainerStarted","Data":"72222da5d66cf8e98c013a2231b7dce6c6b6f560502f1b0dcefbff53115ec820"} Nov 24 02:04:07 crc kubenswrapper[4755]: I1124 02:04:07.245256 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rdgmh"] Nov 24 02:04:07 crc kubenswrapper[4755]: W1124 02:04:07.293954 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0ddc94ef_cc64_4281_bab5_087645764186.slice/crio-6b4a1b3f49da9c07a5b655b01da25ff1476ba3d1a36f9a06f350c166162e63ab WatchSource:0}: Error finding container 6b4a1b3f49da9c07a5b655b01da25ff1476ba3d1a36f9a06f350c166162e63ab: Status 404 returned error can't find the container with id 6b4a1b3f49da9c07a5b655b01da25ff1476ba3d1a36f9a06f350c166162e63ab Nov 24 02:04:08 crc kubenswrapper[4755]: I1124 02:04:08.034215 4755 generic.go:334] "Generic (PLEG): container finished" podID="0ddc94ef-cc64-4281-bab5-087645764186" containerID="57c40c5949807ea69e87db376bb660071b8b60fc5d00080e86e818d27bf0d63b" exitCode=0 Nov 24 02:04:08 crc kubenswrapper[4755]: I1124 02:04:08.034293 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rdgmh" event={"ID":"0ddc94ef-cc64-4281-bab5-087645764186","Type":"ContainerDied","Data":"57c40c5949807ea69e87db376bb660071b8b60fc5d00080e86e818d27bf0d63b"} Nov 24 02:04:08 crc kubenswrapper[4755]: I1124 02:04:08.034325 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rdgmh" event={"ID":"0ddc94ef-cc64-4281-bab5-087645764186","Type":"ContainerStarted","Data":"6b4a1b3f49da9c07a5b655b01da25ff1476ba3d1a36f9a06f350c166162e63ab"} Nov 24 02:04:08 crc kubenswrapper[4755]: I1124 02:04:08.036830 4755 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 24 02:04:08 crc kubenswrapper[4755]: I1124 02:04:08.038002 4755 generic.go:334] "Generic (PLEG): container finished" podID="3cb5bb09-a715-464a-9314-28e1310d1f2e" containerID="fe397fc168c76915108281d61326f4d0d0c057d1cbfa51f53dd2895dbd33cfc4" exitCode=0 Nov 24 02:04:08 crc kubenswrapper[4755]: I1124 02:04:08.038049 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4j2hj" event={"ID":"3cb5bb09-a715-464a-9314-28e1310d1f2e","Type":"ContainerDied","Data":"fe397fc168c76915108281d61326f4d0d0c057d1cbfa51f53dd2895dbd33cfc4"} Nov 24 02:04:09 crc kubenswrapper[4755]: I1124 02:04:09.053019 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rdgmh" event={"ID":"0ddc94ef-cc64-4281-bab5-087645764186","Type":"ContainerStarted","Data":"bca7a1d0469c9f42b4e63522fe6032d50468a8c3401aec5a53a5d0dd109950ad"} Nov 24 02:04:09 crc kubenswrapper[4755]: I1124 02:04:09.055277 4755 generic.go:334] "Generic (PLEG): container finished" podID="3cb5bb09-a715-464a-9314-28e1310d1f2e" containerID="ab43b838862aa91eb7f177a1ce80c59fedccde6db18d46d9454b1d92fb183b1b" exitCode=0 Nov 24 02:04:09 crc kubenswrapper[4755]: I1124 
02:04:09.055327 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4j2hj" event={"ID":"3cb5bb09-a715-464a-9314-28e1310d1f2e","Type":"ContainerDied","Data":"ab43b838862aa91eb7f177a1ce80c59fedccde6db18d46d9454b1d92fb183b1b"} Nov 24 02:04:10 crc kubenswrapper[4755]: I1124 02:04:10.067202 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4j2hj" event={"ID":"3cb5bb09-a715-464a-9314-28e1310d1f2e","Type":"ContainerStarted","Data":"eece282551f2df119e91107e0e277e282096b6886d9b6e464826acdaa499fe70"} Nov 24 02:04:10 crc kubenswrapper[4755]: I1124 02:04:10.070183 4755 generic.go:334] "Generic (PLEG): container finished" podID="0ddc94ef-cc64-4281-bab5-087645764186" containerID="bca7a1d0469c9f42b4e63522fe6032d50468a8c3401aec5a53a5d0dd109950ad" exitCode=0 Nov 24 02:04:10 crc kubenswrapper[4755]: I1124 02:04:10.070240 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rdgmh" event={"ID":"0ddc94ef-cc64-4281-bab5-087645764186","Type":"ContainerDied","Data":"bca7a1d0469c9f42b4e63522fe6032d50468a8c3401aec5a53a5d0dd109950ad"} Nov 24 02:04:10 crc kubenswrapper[4755]: I1124 02:04:10.094037 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-4j2hj" podStartSLOduration=2.696035928 podStartE2EDuration="4.094018295s" podCreationTimestamp="2025-11-24 02:04:06 +0000 UTC" firstStartedPulling="2025-11-24 02:04:08.039305938 +0000 UTC m=+3072.725371459" lastFinishedPulling="2025-11-24 02:04:09.437288325 +0000 UTC m=+3074.123353826" observedRunningTime="2025-11-24 02:04:10.089158978 +0000 UTC m=+3074.775224509" watchObservedRunningTime="2025-11-24 02:04:10.094018295 +0000 UTC m=+3074.780083796" Nov 24 02:04:11 crc kubenswrapper[4755]: I1124 02:04:11.082859 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rdgmh" event={"ID":"0ddc94ef-cc64-4281-bab5-087645764186","Type":"ContainerStarted","Data":"03bd559e86d34277a0581096fd2b8bfa218ffc22db7024bf2cdde225f240b17a"} Nov 24 02:04:11 crc kubenswrapper[4755]: I1124 02:04:11.133627 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-rdgmh" podStartSLOduration=2.697798147 podStartE2EDuration="5.133583826s" podCreationTimestamp="2025-11-24 02:04:06 +0000 UTC" firstStartedPulling="2025-11-24 02:04:08.036492369 +0000 UTC m=+3072.722557890" lastFinishedPulling="2025-11-24 02:04:10.472278068 +0000 UTC m=+3075.158343569" observedRunningTime="2025-11-24 02:04:11.127544176 +0000 UTC m=+3075.813609697" watchObservedRunningTime="2025-11-24 02:04:11.133583826 +0000 UTC m=+3075.819649327" Nov 24 02:04:14 crc kubenswrapper[4755]: I1124 02:04:14.997083 4755 scope.go:117] "RemoveContainer" containerID="26c00dd7d27738208405e6e7edcb38f31b055da14fe6cc90de3d8ac137ba77f7" Nov 24 02:04:14 crc kubenswrapper[4755]: E1124 02:04:14.997966 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:04:16 crc kubenswrapper[4755]: I1124 02:04:16.438906 4755 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-4j2hj" Nov 24 02:04:16 crc kubenswrapper[4755]: I1124 02:04:16.439204 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-4j2hj" Nov 24 02:04:16 crc kubenswrapper[4755]: I1124 02:04:16.482265 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-4j2hj" Nov 24 02:04:16 crc kubenswrapper[4755]: I1124 02:04:16.657307 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-rdgmh" Nov 24 02:04:16 crc kubenswrapper[4755]: I1124 02:04:16.657359 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-rdgmh" Nov 24 02:04:16 crc kubenswrapper[4755]: I1124 02:04:16.698746 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-rdgmh" Nov 24 02:04:17 crc kubenswrapper[4755]: I1124 02:04:17.212749 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-4j2hj" Nov 24 02:04:17 crc kubenswrapper[4755]: I1124 02:04:17.213521 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-rdgmh" Nov 24 02:04:18 crc kubenswrapper[4755]: I1124 02:04:18.122011 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4j2hj"] Nov 24 02:04:19 crc kubenswrapper[4755]: I1124 02:04:19.151665 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-4j2hj" podUID="3cb5bb09-a715-464a-9314-28e1310d1f2e" containerName="registry-server" containerID="cri-o://eece282551f2df119e91107e0e277e282096b6886d9b6e464826acdaa499fe70" gracePeriod=2 Nov 24 02:04:19 crc kubenswrapper[4755]: I1124 02:04:19.515168 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rdgmh"] Nov 24 02:04:19 crc kubenswrapper[4755]: I1124 02:04:19.515758 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-rdgmh" podUID="0ddc94ef-cc64-4281-bab5-087645764186" containerName="registry-server" containerID="cri-o://03bd559e86d34277a0581096fd2b8bfa218ffc22db7024bf2cdde225f240b17a" gracePeriod=2 Nov 24 02:04:19 crc kubenswrapper[4755]: I1124 02:04:19.721910 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4j2hj" Nov 24 02:04:19 crc kubenswrapper[4755]: I1124 02:04:19.856217 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3cb5bb09-a715-464a-9314-28e1310d1f2e-catalog-content\") pod \"3cb5bb09-a715-464a-9314-28e1310d1f2e\" (UID: \"3cb5bb09-a715-464a-9314-28e1310d1f2e\") " Nov 24 02:04:19 crc kubenswrapper[4755]: I1124 02:04:19.856317 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3cb5bb09-a715-464a-9314-28e1310d1f2e-utilities\") pod \"3cb5bb09-a715-464a-9314-28e1310d1f2e\" (UID: \"3cb5bb09-a715-464a-9314-28e1310d1f2e\") " Nov 24 02:04:19 crc kubenswrapper[4755]: I1124 02:04:19.856538 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nnq7l\" (UniqueName: \"kubernetes.io/projected/3cb5bb09-a715-464a-9314-28e1310d1f2e-kube-api-access-nnq7l\") pod \"3cb5bb09-a715-464a-9314-28e1310d1f2e\" (UID: \"3cb5bb09-a715-464a-9314-28e1310d1f2e\") " Nov 24 02:04:19 crc kubenswrapper[4755]: I1124 02:04:19.857555 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3cb5bb09-a715-464a-9314-28e1310d1f2e-utilities" (OuterVolumeSpecName: "utilities") pod "3cb5bb09-a715-464a-9314-28e1310d1f2e" (UID: "3cb5bb09-a715-464a-9314-28e1310d1f2e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 02:04:19 crc kubenswrapper[4755]: I1124 02:04:19.864403 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb5bb09-a715-464a-9314-28e1310d1f2e-kube-api-access-nnq7l" (OuterVolumeSpecName: "kube-api-access-nnq7l") pod "3cb5bb09-a715-464a-9314-28e1310d1f2e" (UID: "3cb5bb09-a715-464a-9314-28e1310d1f2e"). InnerVolumeSpecName "kube-api-access-nnq7l". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 02:04:19 crc kubenswrapper[4755]: I1124 02:04:19.877087 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3cb5bb09-a715-464a-9314-28e1310d1f2e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3cb5bb09-a715-464a-9314-28e1310d1f2e" (UID: "3cb5bb09-a715-464a-9314-28e1310d1f2e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 02:04:19 crc kubenswrapper[4755]: I1124 02:04:19.958637 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nnq7l\" (UniqueName: \"kubernetes.io/projected/3cb5bb09-a715-464a-9314-28e1310d1f2e-kube-api-access-nnq7l\") on node \"crc\" DevicePath \"\"" Nov 24 02:04:19 crc kubenswrapper[4755]: I1124 02:04:19.958696 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3cb5bb09-a715-464a-9314-28e1310d1f2e-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 02:04:19 crc kubenswrapper[4755]: I1124 02:04:19.958705 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3cb5bb09-a715-464a-9314-28e1310d1f2e-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 02:04:19 crc kubenswrapper[4755]: I1124 02:04:19.995324 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rdgmh" Nov 24 02:04:20 crc kubenswrapper[4755]: I1124 02:04:20.059526 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ddc94ef-cc64-4281-bab5-087645764186-utilities\") pod \"0ddc94ef-cc64-4281-bab5-087645764186\" (UID: \"0ddc94ef-cc64-4281-bab5-087645764186\") " Nov 24 02:04:20 crc kubenswrapper[4755]: I1124 02:04:20.059570 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2fgt2\" (UniqueName: \"kubernetes.io/projected/0ddc94ef-cc64-4281-bab5-087645764186-kube-api-access-2fgt2\") pod \"0ddc94ef-cc64-4281-bab5-087645764186\" (UID: \"0ddc94ef-cc64-4281-bab5-087645764186\") " Nov 24 02:04:20 crc kubenswrapper[4755]: I1124 02:04:20.059864 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ddc94ef-cc64-4281-bab5-087645764186-catalog-content\") pod \"0ddc94ef-cc64-4281-bab5-087645764186\" (UID: \"0ddc94ef-cc64-4281-bab5-087645764186\") " Nov 24 02:04:20 crc kubenswrapper[4755]: I1124 02:04:20.061006 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0ddc94ef-cc64-4281-bab5-087645764186-utilities" (OuterVolumeSpecName: "utilities") pod "0ddc94ef-cc64-4281-bab5-087645764186" (UID: "0ddc94ef-cc64-4281-bab5-087645764186"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 02:04:20 crc kubenswrapper[4755]: I1124 02:04:20.064167 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ddc94ef-cc64-4281-bab5-087645764186-kube-api-access-2fgt2" (OuterVolumeSpecName: "kube-api-access-2fgt2") pod "0ddc94ef-cc64-4281-bab5-087645764186" (UID: "0ddc94ef-cc64-4281-bab5-087645764186"). InnerVolumeSpecName "kube-api-access-2fgt2". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 02:04:20 crc kubenswrapper[4755]: I1124 02:04:20.109346 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0ddc94ef-cc64-4281-bab5-087645764186-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0ddc94ef-cc64-4281-bab5-087645764186" (UID: "0ddc94ef-cc64-4281-bab5-087645764186"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 02:04:20 crc kubenswrapper[4755]: I1124 02:04:20.162020 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ddc94ef-cc64-4281-bab5-087645764186-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 02:04:20 crc kubenswrapper[4755]: I1124 02:04:20.162064 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2fgt2\" (UniqueName: \"kubernetes.io/projected/0ddc94ef-cc64-4281-bab5-087645764186-kube-api-access-2fgt2\") on node \"crc\" DevicePath \"\"" Nov 24 02:04:20 crc kubenswrapper[4755]: I1124 02:04:20.162081 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ddc94ef-cc64-4281-bab5-087645764186-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 02:04:20 crc kubenswrapper[4755]: I1124 02:04:20.166189 4755 generic.go:334] "Generic (PLEG): container finished" podID="3cb5bb09-a715-464a-9314-28e1310d1f2e" containerID="eece282551f2df119e91107e0e277e282096b6886d9b6e464826acdaa499fe70" exitCode=0 Nov 24 02:04:20 crc kubenswrapper[4755]: I1124 02:04:20.166257 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4j2hj" Nov 24 02:04:20 crc kubenswrapper[4755]: I1124 02:04:20.166243 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4j2hj" event={"ID":"3cb5bb09-a715-464a-9314-28e1310d1f2e","Type":"ContainerDied","Data":"eece282551f2df119e91107e0e277e282096b6886d9b6e464826acdaa499fe70"} Nov 24 02:04:20 crc kubenswrapper[4755]: I1124 02:04:20.166409 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4j2hj" event={"ID":"3cb5bb09-a715-464a-9314-28e1310d1f2e","Type":"ContainerDied","Data":"72222da5d66cf8e98c013a2231b7dce6c6b6f560502f1b0dcefbff53115ec820"} Nov 24 02:04:20 crc kubenswrapper[4755]: I1124 02:04:20.166433 4755 scope.go:117] "RemoveContainer" containerID="eece282551f2df119e91107e0e277e282096b6886d9b6e464826acdaa499fe70" Nov 24 02:04:20 crc kubenswrapper[4755]: I1124 02:04:20.169192 4755 generic.go:334] "Generic (PLEG): container finished" podID="0ddc94ef-cc64-4281-bab5-087645764186" containerID="03bd559e86d34277a0581096fd2b8bfa218ffc22db7024bf2cdde225f240b17a" exitCode=0 Nov 24 02:04:20 crc kubenswrapper[4755]: I1124 02:04:20.169218 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rdgmh" event={"ID":"0ddc94ef-cc64-4281-bab5-087645764186","Type":"ContainerDied","Data":"03bd559e86d34277a0581096fd2b8bfa218ffc22db7024bf2cdde225f240b17a"} Nov 24 02:04:20 crc kubenswrapper[4755]: I1124 02:04:20.169240 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rdgmh" event={"ID":"0ddc94ef-cc64-4281-bab5-087645764186","Type":"ContainerDied","Data":"6b4a1b3f49da9c07a5b655b01da25ff1476ba3d1a36f9a06f350c166162e63ab"} Nov 24 02:04:20 crc kubenswrapper[4755]: I1124 02:04:20.169293 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rdgmh" Nov 24 02:04:20 crc kubenswrapper[4755]: I1124 02:04:20.206221 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4j2hj"] Nov 24 02:04:20 crc kubenswrapper[4755]: I1124 02:04:20.214406 4755 scope.go:117] "RemoveContainer" containerID="ab43b838862aa91eb7f177a1ce80c59fedccde6db18d46d9454b1d92fb183b1b" Nov 24 02:04:20 crc kubenswrapper[4755]: I1124 02:04:20.217289 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-4j2hj"] Nov 24 02:04:20 crc kubenswrapper[4755]: I1124 02:04:20.225058 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rdgmh"] Nov 24 02:04:20 crc kubenswrapper[4755]: I1124 02:04:20.233153 4755 scope.go:117] "RemoveContainer" containerID="fe397fc168c76915108281d61326f4d0d0c057d1cbfa51f53dd2895dbd33cfc4" Nov 24 02:04:20 crc kubenswrapper[4755]: I1124 02:04:20.233274 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-rdgmh"] Nov 24 02:04:20 crc kubenswrapper[4755]: I1124 02:04:20.253277 4755 scope.go:117] "RemoveContainer" containerID="eece282551f2df119e91107e0e277e282096b6886d9b6e464826acdaa499fe70" Nov 24 02:04:20 crc kubenswrapper[4755]: E1124 02:04:20.253798 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eece282551f2df119e91107e0e277e282096b6886d9b6e464826acdaa499fe70\": container with ID starting with eece282551f2df119e91107e0e277e282096b6886d9b6e464826acdaa499fe70 not found: ID does not exist" containerID="eece282551f2df119e91107e0e277e282096b6886d9b6e464826acdaa499fe70" Nov 24 02:04:20 crc kubenswrapper[4755]: I1124 02:04:20.253851 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eece282551f2df119e91107e0e277e282096b6886d9b6e464826acdaa499fe70"} err="failed to get container status \"eece282551f2df119e91107e0e277e282096b6886d9b6e464826acdaa499fe70\": rpc error: code = NotFound desc = could not find container \"eece282551f2df119e91107e0e277e282096b6886d9b6e464826acdaa499fe70\": container with ID starting with eece282551f2df119e91107e0e277e282096b6886d9b6e464826acdaa499fe70 not found: ID does not exist" Nov 24 02:04:20 crc kubenswrapper[4755]: I1124 02:04:20.253875 4755 scope.go:117] "RemoveContainer" containerID="ab43b838862aa91eb7f177a1ce80c59fedccde6db18d46d9454b1d92fb183b1b" Nov 24 02:04:20 crc kubenswrapper[4755]: E1124 02:04:20.254108 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ab43b838862aa91eb7f177a1ce80c59fedccde6db18d46d9454b1d92fb183b1b\": container with ID starting with ab43b838862aa91eb7f177a1ce80c59fedccde6db18d46d9454b1d92fb183b1b not found: ID does not exist" containerID="ab43b838862aa91eb7f177a1ce80c59fedccde6db18d46d9454b1d92fb183b1b" Nov 24 02:04:20 crc kubenswrapper[4755]: I1124 02:04:20.254129 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab43b838862aa91eb7f177a1ce80c59fedccde6db18d46d9454b1d92fb183b1b"} err="failed to get container status \"ab43b838862aa91eb7f177a1ce80c59fedccde6db18d46d9454b1d92fb183b1b\": rpc error: code = NotFound desc = could not find container \"ab43b838862aa91eb7f177a1ce80c59fedccde6db18d46d9454b1d92fb183b1b\": container with ID starting with 
ab43b838862aa91eb7f177a1ce80c59fedccde6db18d46d9454b1d92fb183b1b not found: ID does not exist" Nov 24 02:04:20 crc kubenswrapper[4755]: I1124 02:04:20.254165 4755 scope.go:117] "RemoveContainer" containerID="fe397fc168c76915108281d61326f4d0d0c057d1cbfa51f53dd2895dbd33cfc4" Nov 24 02:04:20 crc kubenswrapper[4755]: E1124 02:04:20.254551 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe397fc168c76915108281d61326f4d0d0c057d1cbfa51f53dd2895dbd33cfc4\": container with ID starting with fe397fc168c76915108281d61326f4d0d0c057d1cbfa51f53dd2895dbd33cfc4 not found: ID does not exist" containerID="fe397fc168c76915108281d61326f4d0d0c057d1cbfa51f53dd2895dbd33cfc4" Nov 24 02:04:20 crc kubenswrapper[4755]: I1124 02:04:20.254588 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe397fc168c76915108281d61326f4d0d0c057d1cbfa51f53dd2895dbd33cfc4"} err="failed to get container status \"fe397fc168c76915108281d61326f4d0d0c057d1cbfa51f53dd2895dbd33cfc4\": rpc error: code = NotFound desc = could not find container \"fe397fc168c76915108281d61326f4d0d0c057d1cbfa51f53dd2895dbd33cfc4\": container with ID starting with fe397fc168c76915108281d61326f4d0d0c057d1cbfa51f53dd2895dbd33cfc4 not found: ID does not exist" Nov 24 02:04:20 crc kubenswrapper[4755]: I1124 02:04:20.254672 4755 scope.go:117] "RemoveContainer" containerID="03bd559e86d34277a0581096fd2b8bfa218ffc22db7024bf2cdde225f240b17a" Nov 24 02:04:20 crc kubenswrapper[4755]: I1124 02:04:20.337427 4755 scope.go:117] "RemoveContainer" containerID="bca7a1d0469c9f42b4e63522fe6032d50468a8c3401aec5a53a5d0dd109950ad" Nov 24 02:04:20 crc kubenswrapper[4755]: I1124 02:04:20.355887 4755 scope.go:117] "RemoveContainer" containerID="57c40c5949807ea69e87db376bb660071b8b60fc5d00080e86e818d27bf0d63b" Nov 24 02:04:20 crc kubenswrapper[4755]: I1124 02:04:20.403861 4755 scope.go:117] "RemoveContainer" containerID="03bd559e86d34277a0581096fd2b8bfa218ffc22db7024bf2cdde225f240b17a" Nov 24 02:04:20 crc kubenswrapper[4755]: E1124 02:04:20.404321 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"03bd559e86d34277a0581096fd2b8bfa218ffc22db7024bf2cdde225f240b17a\": container with ID starting with 03bd559e86d34277a0581096fd2b8bfa218ffc22db7024bf2cdde225f240b17a not found: ID does not exist" containerID="03bd559e86d34277a0581096fd2b8bfa218ffc22db7024bf2cdde225f240b17a" Nov 24 02:04:20 crc kubenswrapper[4755]: I1124 02:04:20.404351 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03bd559e86d34277a0581096fd2b8bfa218ffc22db7024bf2cdde225f240b17a"} err="failed to get container status \"03bd559e86d34277a0581096fd2b8bfa218ffc22db7024bf2cdde225f240b17a\": rpc error: code = NotFound desc = could not find container \"03bd559e86d34277a0581096fd2b8bfa218ffc22db7024bf2cdde225f240b17a\": container with ID starting with 03bd559e86d34277a0581096fd2b8bfa218ffc22db7024bf2cdde225f240b17a not found: ID does not exist" Nov 24 02:04:20 crc kubenswrapper[4755]: I1124 02:04:20.404374 4755 scope.go:117] "RemoveContainer" containerID="bca7a1d0469c9f42b4e63522fe6032d50468a8c3401aec5a53a5d0dd109950ad" Nov 24 02:04:20 crc kubenswrapper[4755]: E1124 02:04:20.404875 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bca7a1d0469c9f42b4e63522fe6032d50468a8c3401aec5a53a5d0dd109950ad\": container 
with ID starting with bca7a1d0469c9f42b4e63522fe6032d50468a8c3401aec5a53a5d0dd109950ad not found: ID does not exist" containerID="bca7a1d0469c9f42b4e63522fe6032d50468a8c3401aec5a53a5d0dd109950ad" Nov 24 02:04:20 crc kubenswrapper[4755]: I1124 02:04:20.404936 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bca7a1d0469c9f42b4e63522fe6032d50468a8c3401aec5a53a5d0dd109950ad"} err="failed to get container status \"bca7a1d0469c9f42b4e63522fe6032d50468a8c3401aec5a53a5d0dd109950ad\": rpc error: code = NotFound desc = could not find container \"bca7a1d0469c9f42b4e63522fe6032d50468a8c3401aec5a53a5d0dd109950ad\": container with ID starting with bca7a1d0469c9f42b4e63522fe6032d50468a8c3401aec5a53a5d0dd109950ad not found: ID does not exist" Nov 24 02:04:20 crc kubenswrapper[4755]: I1124 02:04:20.404971 4755 scope.go:117] "RemoveContainer" containerID="57c40c5949807ea69e87db376bb660071b8b60fc5d00080e86e818d27bf0d63b" Nov 24 02:04:20 crc kubenswrapper[4755]: E1124 02:04:20.405433 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"57c40c5949807ea69e87db376bb660071b8b60fc5d00080e86e818d27bf0d63b\": container with ID starting with 57c40c5949807ea69e87db376bb660071b8b60fc5d00080e86e818d27bf0d63b not found: ID does not exist" containerID="57c40c5949807ea69e87db376bb660071b8b60fc5d00080e86e818d27bf0d63b" Nov 24 02:04:20 crc kubenswrapper[4755]: I1124 02:04:20.405466 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57c40c5949807ea69e87db376bb660071b8b60fc5d00080e86e818d27bf0d63b"} err="failed to get container status \"57c40c5949807ea69e87db376bb660071b8b60fc5d00080e86e818d27bf0d63b\": rpc error: code = NotFound desc = could not find container \"57c40c5949807ea69e87db376bb660071b8b60fc5d00080e86e818d27bf0d63b\": container with ID starting with 57c40c5949807ea69e87db376bb660071b8b60fc5d00080e86e818d27bf0d63b not found: ID does not exist" Nov 24 02:04:22 crc kubenswrapper[4755]: I1124 02:04:22.008910 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0ddc94ef-cc64-4281-bab5-087645764186" path="/var/lib/kubelet/pods/0ddc94ef-cc64-4281-bab5-087645764186/volumes" Nov 24 02:04:22 crc kubenswrapper[4755]: I1124 02:04:22.009742 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb5bb09-a715-464a-9314-28e1310d1f2e" path="/var/lib/kubelet/pods/3cb5bb09-a715-464a-9314-28e1310d1f2e/volumes" Nov 24 02:04:27 crc kubenswrapper[4755]: I1124 02:04:27.997242 4755 scope.go:117] "RemoveContainer" containerID="26c00dd7d27738208405e6e7edcb38f31b055da14fe6cc90de3d8ac137ba77f7" Nov 24 02:04:27 crc kubenswrapper[4755]: E1124 02:04:27.998158 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:04:40 crc kubenswrapper[4755]: I1124 02:04:40.997126 4755 scope.go:117] "RemoveContainer" containerID="26c00dd7d27738208405e6e7edcb38f31b055da14fe6cc90de3d8ac137ba77f7" Nov 24 02:04:40 crc kubenswrapper[4755]: E1124 02:04:40.997782 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:04:53 crc kubenswrapper[4755]: I1124 02:04:53.996726 4755 scope.go:117] "RemoveContainer" containerID="26c00dd7d27738208405e6e7edcb38f31b055da14fe6cc90de3d8ac137ba77f7" Nov 24 02:04:53 crc kubenswrapper[4755]: E1124 02:04:53.999194 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:05:07 crc kubenswrapper[4755]: I1124 02:05:07.997394 4755 scope.go:117] "RemoveContainer" containerID="26c00dd7d27738208405e6e7edcb38f31b055da14fe6cc90de3d8ac137ba77f7" Nov 24 02:05:07 crc kubenswrapper[4755]: E1124 02:05:07.998222 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:05:20 crc kubenswrapper[4755]: I1124 02:05:20.996538 4755 scope.go:117] "RemoveContainer" containerID="26c00dd7d27738208405e6e7edcb38f31b055da14fe6cc90de3d8ac137ba77f7" Nov 24 02:05:20 crc kubenswrapper[4755]: E1124 02:05:20.997292 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:05:31 crc kubenswrapper[4755]: I1124 02:05:31.998030 4755 scope.go:117] "RemoveContainer" containerID="26c00dd7d27738208405e6e7edcb38f31b055da14fe6cc90de3d8ac137ba77f7" Nov 24 02:05:31 crc kubenswrapper[4755]: E1124 02:05:31.998813 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:05:46 crc kubenswrapper[4755]: I1124 02:05:46.004541 4755 scope.go:117] "RemoveContainer" containerID="26c00dd7d27738208405e6e7edcb38f31b055da14fe6cc90de3d8ac137ba77f7" Nov 24 02:05:46 crc kubenswrapper[4755]: E1124 02:05:46.005507 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:05:58 crc kubenswrapper[4755]: I1124 02:05:58.996996 4755 scope.go:117] "RemoveContainer" containerID="26c00dd7d27738208405e6e7edcb38f31b055da14fe6cc90de3d8ac137ba77f7" Nov 24 02:05:58 crc kubenswrapper[4755]: E1124 02:05:58.998107 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:06:04 crc kubenswrapper[4755]: I1124 02:06:04.602634 4755 scope.go:117] "RemoveContainer" containerID="a737a2f62e2ec8ed158fba50cb2fa83ddca7b7156b3a01710d822ae03fab13c1" Nov 24 02:06:04 crc kubenswrapper[4755]: I1124 02:06:04.628658 4755 scope.go:117] "RemoveContainer" containerID="9355c86e9a0d9fd599025973c35d3838c5f08b5319ef484d0fee381a12703a0b" Nov 24 02:06:04 crc kubenswrapper[4755]: I1124 02:06:04.692521 4755 scope.go:117] "RemoveContainer" containerID="c5753921750a76c3056908fa5c15f158ab2043881e31fb262f7cd7c83e733edb" Nov 24 02:06:09 crc kubenswrapper[4755]: I1124 02:06:09.997339 4755 scope.go:117] "RemoveContainer" containerID="26c00dd7d27738208405e6e7edcb38f31b055da14fe6cc90de3d8ac137ba77f7" Nov 24 02:06:09 crc kubenswrapper[4755]: E1124 02:06:09.998282 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:06:21 crc kubenswrapper[4755]: I1124 02:06:21.996712 4755 scope.go:117] "RemoveContainer" containerID="26c00dd7d27738208405e6e7edcb38f31b055da14fe6cc90de3d8ac137ba77f7" Nov 24 02:06:21 crc kubenswrapper[4755]: E1124 02:06:21.997664 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:06:34 crc kubenswrapper[4755]: I1124 02:06:34.996700 4755 scope.go:117] "RemoveContainer" containerID="26c00dd7d27738208405e6e7edcb38f31b055da14fe6cc90de3d8ac137ba77f7" Nov 24 02:06:36 crc kubenswrapper[4755]: I1124 02:06:36.084191 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" event={"ID":"b1962128-02a0-46c3-82c2-5055c2aed0b9","Type":"ContainerStarted","Data":"8d58e541d2b2ce30c0dc08c9a1783324e6dc27150018b18022e3de27e7efa390"} Nov 24 02:06:41 crc kubenswrapper[4755]: I1124 02:06:41.137694 4755 generic.go:334] "Generic (PLEG): container finished" podID="23d50e60-91da-42c3-8d11-5c22eab88929" containerID="87b3a178f91345ecb7a9925054b60644de8b8680a70e7e8a75b7f1e98581bcdb" exitCode=0 Nov 24 
02:06:41 crc kubenswrapper[4755]: I1124 02:06:41.137846 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"23d50e60-91da-42c3-8d11-5c22eab88929","Type":"ContainerDied","Data":"87b3a178f91345ecb7a9925054b60644de8b8680a70e7e8a75b7f1e98581bcdb"} Nov 24 02:06:42 crc kubenswrapper[4755]: I1124 02:06:42.540068 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Nov 24 02:06:42 crc kubenswrapper[4755]: I1124 02:06:42.647913 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/23d50e60-91da-42c3-8d11-5c22eab88929-ca-certs\") pod \"23d50e60-91da-42c3-8d11-5c22eab88929\" (UID: \"23d50e60-91da-42c3-8d11-5c22eab88929\") " Nov 24 02:06:42 crc kubenswrapper[4755]: I1124 02:06:42.648276 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/23d50e60-91da-42c3-8d11-5c22eab88929-test-operator-ephemeral-workdir\") pod \"23d50e60-91da-42c3-8d11-5c22eab88929\" (UID: \"23d50e60-91da-42c3-8d11-5c22eab88929\") " Nov 24 02:06:42 crc kubenswrapper[4755]: I1124 02:06:42.648422 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jv5x6\" (UniqueName: \"kubernetes.io/projected/23d50e60-91da-42c3-8d11-5c22eab88929-kube-api-access-jv5x6\") pod \"23d50e60-91da-42c3-8d11-5c22eab88929\" (UID: \"23d50e60-91da-42c3-8d11-5c22eab88929\") " Nov 24 02:06:42 crc kubenswrapper[4755]: I1124 02:06:42.648544 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/23d50e60-91da-42c3-8d11-5c22eab88929-config-data\") pod \"23d50e60-91da-42c3-8d11-5c22eab88929\" (UID: \"23d50e60-91da-42c3-8d11-5c22eab88929\") " Nov 24 02:06:42 crc kubenswrapper[4755]: I1124 02:06:42.648827 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/23d50e60-91da-42c3-8d11-5c22eab88929-openstack-config\") pod \"23d50e60-91da-42c3-8d11-5c22eab88929\" (UID: \"23d50e60-91da-42c3-8d11-5c22eab88929\") " Nov 24 02:06:42 crc kubenswrapper[4755]: I1124 02:06:42.649257 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/23d50e60-91da-42c3-8d11-5c22eab88929-test-operator-ephemeral-temporary\") pod \"23d50e60-91da-42c3-8d11-5c22eab88929\" (UID: \"23d50e60-91da-42c3-8d11-5c22eab88929\") " Nov 24 02:06:42 crc kubenswrapper[4755]: I1124 02:06:42.649511 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"23d50e60-91da-42c3-8d11-5c22eab88929\" (UID: \"23d50e60-91da-42c3-8d11-5c22eab88929\") " Nov 24 02:06:42 crc kubenswrapper[4755]: I1124 02:06:42.649666 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/23d50e60-91da-42c3-8d11-5c22eab88929-openstack-config-secret\") pod \"23d50e60-91da-42c3-8d11-5c22eab88929\" (UID: \"23d50e60-91da-42c3-8d11-5c22eab88929\") " Nov 24 02:06:42 crc kubenswrapper[4755]: I1124 02:06:42.649786 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/23d50e60-91da-42c3-8d11-5c22eab88929-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "23d50e60-91da-42c3-8d11-5c22eab88929" (UID: "23d50e60-91da-42c3-8d11-5c22eab88929"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 02:06:42 crc kubenswrapper[4755]: I1124 02:06:42.649822 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/23d50e60-91da-42c3-8d11-5c22eab88929-ssh-key\") pod \"23d50e60-91da-42c3-8d11-5c22eab88929\" (UID: \"23d50e60-91da-42c3-8d11-5c22eab88929\") " Nov 24 02:06:42 crc kubenswrapper[4755]: I1124 02:06:42.650281 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/23d50e60-91da-42c3-8d11-5c22eab88929-config-data" (OuterVolumeSpecName: "config-data") pod "23d50e60-91da-42c3-8d11-5c22eab88929" (UID: "23d50e60-91da-42c3-8d11-5c22eab88929"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 02:06:42 crc kubenswrapper[4755]: I1124 02:06:42.650546 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/23d50e60-91da-42c3-8d11-5c22eab88929-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 02:06:42 crc kubenswrapper[4755]: I1124 02:06:42.650666 4755 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/23d50e60-91da-42c3-8d11-5c22eab88929-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Nov 24 02:06:42 crc kubenswrapper[4755]: I1124 02:06:42.654712 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "test-operator-logs") pod "23d50e60-91da-42c3-8d11-5c22eab88929" (UID: "23d50e60-91da-42c3-8d11-5c22eab88929"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 24 02:06:42 crc kubenswrapper[4755]: I1124 02:06:42.655261 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23d50e60-91da-42c3-8d11-5c22eab88929-kube-api-access-jv5x6" (OuterVolumeSpecName: "kube-api-access-jv5x6") pod "23d50e60-91da-42c3-8d11-5c22eab88929" (UID: "23d50e60-91da-42c3-8d11-5c22eab88929"). InnerVolumeSpecName "kube-api-access-jv5x6". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 02:06:42 crc kubenswrapper[4755]: I1124 02:06:42.655852 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23d50e60-91da-42c3-8d11-5c22eab88929-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "23d50e60-91da-42c3-8d11-5c22eab88929" (UID: "23d50e60-91da-42c3-8d11-5c22eab88929"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 02:06:42 crc kubenswrapper[4755]: I1124 02:06:42.675305 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23d50e60-91da-42c3-8d11-5c22eab88929-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "23d50e60-91da-42c3-8d11-5c22eab88929" (UID: "23d50e60-91da-42c3-8d11-5c22eab88929"). InnerVolumeSpecName "openstack-config-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 02:06:42 crc kubenswrapper[4755]: I1124 02:06:42.676181 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23d50e60-91da-42c3-8d11-5c22eab88929-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "23d50e60-91da-42c3-8d11-5c22eab88929" (UID: "23d50e60-91da-42c3-8d11-5c22eab88929"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 02:06:42 crc kubenswrapper[4755]: I1124 02:06:42.677142 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23d50e60-91da-42c3-8d11-5c22eab88929-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "23d50e60-91da-42c3-8d11-5c22eab88929" (UID: "23d50e60-91da-42c3-8d11-5c22eab88929"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 02:06:42 crc kubenswrapper[4755]: I1124 02:06:42.701471 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/23d50e60-91da-42c3-8d11-5c22eab88929-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "23d50e60-91da-42c3-8d11-5c22eab88929" (UID: "23d50e60-91da-42c3-8d11-5c22eab88929"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 02:06:42 crc kubenswrapper[4755]: I1124 02:06:42.752780 4755 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Nov 24 02:06:42 crc kubenswrapper[4755]: I1124 02:06:42.752821 4755 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/23d50e60-91da-42c3-8d11-5c22eab88929-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Nov 24 02:06:42 crc kubenswrapper[4755]: I1124 02:06:42.752832 4755 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/23d50e60-91da-42c3-8d11-5c22eab88929-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 24 02:06:42 crc kubenswrapper[4755]: I1124 02:06:42.752841 4755 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/23d50e60-91da-42c3-8d11-5c22eab88929-ca-certs\") on node \"crc\" DevicePath \"\"" Nov 24 02:06:42 crc kubenswrapper[4755]: I1124 02:06:42.752850 4755 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/23d50e60-91da-42c3-8d11-5c22eab88929-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Nov 24 02:06:42 crc kubenswrapper[4755]: I1124 02:06:42.753050 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jv5x6\" (UniqueName: \"kubernetes.io/projected/23d50e60-91da-42c3-8d11-5c22eab88929-kube-api-access-jv5x6\") on node \"crc\" DevicePath \"\"" Nov 24 02:06:42 crc kubenswrapper[4755]: I1124 02:06:42.753063 4755 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/23d50e60-91da-42c3-8d11-5c22eab88929-openstack-config\") on node \"crc\" DevicePath \"\"" Nov 24 02:06:42 crc kubenswrapper[4755]: I1124 02:06:42.773626 4755 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Nov 24 02:06:42 crc kubenswrapper[4755]: I1124 02:06:42.854763 4755 reconciler_common.go:293] 
"Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Nov 24 02:06:43 crc kubenswrapper[4755]: I1124 02:06:43.161277 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"23d50e60-91da-42c3-8d11-5c22eab88929","Type":"ContainerDied","Data":"997445fcb031ccebc6e22b9caf19e1cc4cf47d9ea3c3c411e6f2c47a3321d1a7"} Nov 24 02:06:43 crc kubenswrapper[4755]: I1124 02:06:43.161646 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="997445fcb031ccebc6e22b9caf19e1cc4cf47d9ea3c3c411e6f2c47a3321d1a7" Nov 24 02:06:43 crc kubenswrapper[4755]: I1124 02:06:43.161372 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Nov 24 02:06:49 crc kubenswrapper[4755]: I1124 02:06:49.158055 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Nov 24 02:06:49 crc kubenswrapper[4755]: E1124 02:06:49.159223 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23d50e60-91da-42c3-8d11-5c22eab88929" containerName="tempest-tests-tempest-tests-runner" Nov 24 02:06:49 crc kubenswrapper[4755]: I1124 02:06:49.159247 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="23d50e60-91da-42c3-8d11-5c22eab88929" containerName="tempest-tests-tempest-tests-runner" Nov 24 02:06:49 crc kubenswrapper[4755]: E1124 02:06:49.159281 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3cb5bb09-a715-464a-9314-28e1310d1f2e" containerName="extract-content" Nov 24 02:06:49 crc kubenswrapper[4755]: I1124 02:06:49.159297 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="3cb5bb09-a715-464a-9314-28e1310d1f2e" containerName="extract-content" Nov 24 02:06:49 crc kubenswrapper[4755]: E1124 02:06:49.159312 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ddc94ef-cc64-4281-bab5-087645764186" containerName="extract-content" Nov 24 02:06:49 crc kubenswrapper[4755]: I1124 02:06:49.159323 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ddc94ef-cc64-4281-bab5-087645764186" containerName="extract-content" Nov 24 02:06:49 crc kubenswrapper[4755]: E1124 02:06:49.159345 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ddc94ef-cc64-4281-bab5-087645764186" containerName="extract-utilities" Nov 24 02:06:49 crc kubenswrapper[4755]: I1124 02:06:49.159356 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ddc94ef-cc64-4281-bab5-087645764186" containerName="extract-utilities" Nov 24 02:06:49 crc kubenswrapper[4755]: E1124 02:06:49.159381 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3cb5bb09-a715-464a-9314-28e1310d1f2e" containerName="extract-utilities" Nov 24 02:06:49 crc kubenswrapper[4755]: I1124 02:06:49.159393 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="3cb5bb09-a715-464a-9314-28e1310d1f2e" containerName="extract-utilities" Nov 24 02:06:49 crc kubenswrapper[4755]: E1124 02:06:49.159412 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3cb5bb09-a715-464a-9314-28e1310d1f2e" containerName="registry-server" Nov 24 02:06:49 crc kubenswrapper[4755]: I1124 02:06:49.159423 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="3cb5bb09-a715-464a-9314-28e1310d1f2e" containerName="registry-server" Nov 24 02:06:49 crc kubenswrapper[4755]: E1124 02:06:49.159451 4755 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ddc94ef-cc64-4281-bab5-087645764186" containerName="registry-server" Nov 24 02:06:49 crc kubenswrapper[4755]: I1124 02:06:49.159461 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ddc94ef-cc64-4281-bab5-087645764186" containerName="registry-server" Nov 24 02:06:49 crc kubenswrapper[4755]: I1124 02:06:49.159753 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="23d50e60-91da-42c3-8d11-5c22eab88929" containerName="tempest-tests-tempest-tests-runner" Nov 24 02:06:49 crc kubenswrapper[4755]: I1124 02:06:49.159787 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ddc94ef-cc64-4281-bab5-087645764186" containerName="registry-server" Nov 24 02:06:49 crc kubenswrapper[4755]: I1124 02:06:49.159807 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="3cb5bb09-a715-464a-9314-28e1310d1f2e" containerName="registry-server" Nov 24 02:06:49 crc kubenswrapper[4755]: I1124 02:06:49.160535 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Nov 24 02:06:49 crc kubenswrapper[4755]: I1124 02:06:49.162631 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-q8kvm" Nov 24 02:06:49 crc kubenswrapper[4755]: I1124 02:06:49.180016 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Nov 24 02:06:49 crc kubenswrapper[4755]: I1124 02:06:49.286790 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"19e2c4ea-ac40-4007-b635-f5decba54fc3\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Nov 24 02:06:49 crc kubenswrapper[4755]: I1124 02:06:49.286862 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vwpcp\" (UniqueName: \"kubernetes.io/projected/19e2c4ea-ac40-4007-b635-f5decba54fc3-kube-api-access-vwpcp\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"19e2c4ea-ac40-4007-b635-f5decba54fc3\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Nov 24 02:06:49 crc kubenswrapper[4755]: I1124 02:06:49.389387 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"19e2c4ea-ac40-4007-b635-f5decba54fc3\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Nov 24 02:06:49 crc kubenswrapper[4755]: I1124 02:06:49.389479 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vwpcp\" (UniqueName: \"kubernetes.io/projected/19e2c4ea-ac40-4007-b635-f5decba54fc3-kube-api-access-vwpcp\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"19e2c4ea-ac40-4007-b635-f5decba54fc3\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Nov 24 02:06:49 crc kubenswrapper[4755]: I1124 02:06:49.390413 4755 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod 
\"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"19e2c4ea-ac40-4007-b635-f5decba54fc3\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Nov 24 02:06:49 crc kubenswrapper[4755]: I1124 02:06:49.412456 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vwpcp\" (UniqueName: \"kubernetes.io/projected/19e2c4ea-ac40-4007-b635-f5decba54fc3-kube-api-access-vwpcp\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"19e2c4ea-ac40-4007-b635-f5decba54fc3\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Nov 24 02:06:49 crc kubenswrapper[4755]: I1124 02:06:49.427216 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"19e2c4ea-ac40-4007-b635-f5decba54fc3\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Nov 24 02:06:49 crc kubenswrapper[4755]: I1124 02:06:49.483720 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Nov 24 02:06:49 crc kubenswrapper[4755]: I1124 02:06:49.915356 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Nov 24 02:06:49 crc kubenswrapper[4755]: W1124 02:06:49.919769 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod19e2c4ea_ac40_4007_b635_f5decba54fc3.slice/crio-1a74f8819ee218d6a8f0b537d7b8eab74f34c1e6383fbcacbe87f7a4ceeb922a WatchSource:0}: Error finding container 1a74f8819ee218d6a8f0b537d7b8eab74f34c1e6383fbcacbe87f7a4ceeb922a: Status 404 returned error can't find the container with id 1a74f8819ee218d6a8f0b537d7b8eab74f34c1e6383fbcacbe87f7a4ceeb922a Nov 24 02:06:50 crc kubenswrapper[4755]: I1124 02:06:50.229553 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"19e2c4ea-ac40-4007-b635-f5decba54fc3","Type":"ContainerStarted","Data":"1a74f8819ee218d6a8f0b537d7b8eab74f34c1e6383fbcacbe87f7a4ceeb922a"} Nov 24 02:06:51 crc kubenswrapper[4755]: I1124 02:06:51.245929 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"19e2c4ea-ac40-4007-b635-f5decba54fc3","Type":"ContainerStarted","Data":"3e7fd866a7cd0eceb65b583917686ce375dc2f47872bc935e6af80b842798ea0"} Nov 24 02:06:51 crc kubenswrapper[4755]: I1124 02:06:51.273901 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=1.527990665 podStartE2EDuration="2.273841042s" podCreationTimestamp="2025-11-24 02:06:49 +0000 UTC" firstStartedPulling="2025-11-24 02:06:49.921708475 +0000 UTC m=+3234.607773966" lastFinishedPulling="2025-11-24 02:06:50.667558842 +0000 UTC m=+3235.353624343" observedRunningTime="2025-11-24 02:06:51.26241708 +0000 UTC m=+3235.948482631" watchObservedRunningTime="2025-11-24 02:06:51.273841042 +0000 UTC m=+3235.959906583" Nov 24 02:07:13 crc kubenswrapper[4755]: I1124 02:07:13.646475 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-zx8hf/must-gather-dnqqd"] Nov 24 02:07:13 crc kubenswrapper[4755]: I1124 
02:07:13.651289 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zx8hf/must-gather-dnqqd" Nov 24 02:07:13 crc kubenswrapper[4755]: I1124 02:07:13.656875 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-zx8hf"/"openshift-service-ca.crt" Nov 24 02:07:13 crc kubenswrapper[4755]: I1124 02:07:13.657076 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-zx8hf"/"kube-root-ca.crt" Nov 24 02:07:13 crc kubenswrapper[4755]: I1124 02:07:13.669401 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-zx8hf/must-gather-dnqqd"] Nov 24 02:07:13 crc kubenswrapper[4755]: I1124 02:07:13.702663 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6ljnt\" (UniqueName: \"kubernetes.io/projected/a93b8fb8-f220-4eac-9433-73cbeca1a486-kube-api-access-6ljnt\") pod \"must-gather-dnqqd\" (UID: \"a93b8fb8-f220-4eac-9433-73cbeca1a486\") " pod="openshift-must-gather-zx8hf/must-gather-dnqqd" Nov 24 02:07:13 crc kubenswrapper[4755]: I1124 02:07:13.702712 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a93b8fb8-f220-4eac-9433-73cbeca1a486-must-gather-output\") pod \"must-gather-dnqqd\" (UID: \"a93b8fb8-f220-4eac-9433-73cbeca1a486\") " pod="openshift-must-gather-zx8hf/must-gather-dnqqd" Nov 24 02:07:13 crc kubenswrapper[4755]: I1124 02:07:13.804170 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6ljnt\" (UniqueName: \"kubernetes.io/projected/a93b8fb8-f220-4eac-9433-73cbeca1a486-kube-api-access-6ljnt\") pod \"must-gather-dnqqd\" (UID: \"a93b8fb8-f220-4eac-9433-73cbeca1a486\") " pod="openshift-must-gather-zx8hf/must-gather-dnqqd" Nov 24 02:07:13 crc kubenswrapper[4755]: I1124 02:07:13.804213 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a93b8fb8-f220-4eac-9433-73cbeca1a486-must-gather-output\") pod \"must-gather-dnqqd\" (UID: \"a93b8fb8-f220-4eac-9433-73cbeca1a486\") " pod="openshift-must-gather-zx8hf/must-gather-dnqqd" Nov 24 02:07:13 crc kubenswrapper[4755]: I1124 02:07:13.804820 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a93b8fb8-f220-4eac-9433-73cbeca1a486-must-gather-output\") pod \"must-gather-dnqqd\" (UID: \"a93b8fb8-f220-4eac-9433-73cbeca1a486\") " pod="openshift-must-gather-zx8hf/must-gather-dnqqd" Nov 24 02:07:13 crc kubenswrapper[4755]: I1124 02:07:13.821338 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6ljnt\" (UniqueName: \"kubernetes.io/projected/a93b8fb8-f220-4eac-9433-73cbeca1a486-kube-api-access-6ljnt\") pod \"must-gather-dnqqd\" (UID: \"a93b8fb8-f220-4eac-9433-73cbeca1a486\") " pod="openshift-must-gather-zx8hf/must-gather-dnqqd" Nov 24 02:07:13 crc kubenswrapper[4755]: I1124 02:07:13.985396 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-zx8hf/must-gather-dnqqd" Nov 24 02:07:14 crc kubenswrapper[4755]: I1124 02:07:14.463071 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-zx8hf/must-gather-dnqqd"] Nov 24 02:07:14 crc kubenswrapper[4755]: I1124 02:07:14.497176 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zx8hf/must-gather-dnqqd" event={"ID":"a93b8fb8-f220-4eac-9433-73cbeca1a486","Type":"ContainerStarted","Data":"1082240ac11894362d83dfcac97bf1de2e242fe4acc8a89135fddd04ce56e92c"} Nov 24 02:07:21 crc kubenswrapper[4755]: I1124 02:07:21.569256 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zx8hf/must-gather-dnqqd" event={"ID":"a93b8fb8-f220-4eac-9433-73cbeca1a486","Type":"ContainerStarted","Data":"af5be07fe1b44c929a620abbdc5d6d042e3b5e3cccf4edbea1362746d88ec039"} Nov 24 02:07:21 crc kubenswrapper[4755]: I1124 02:07:21.569982 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zx8hf/must-gather-dnqqd" event={"ID":"a93b8fb8-f220-4eac-9433-73cbeca1a486","Type":"ContainerStarted","Data":"84f2c3e3e4177436c2b3632ddeeeef8080c02940c1e49c3be2ba560d41a1c403"} Nov 24 02:07:21 crc kubenswrapper[4755]: I1124 02:07:21.590639 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-zx8hf/must-gather-dnqqd" podStartSLOduration=2.529988131 podStartE2EDuration="8.590623165s" podCreationTimestamp="2025-11-24 02:07:13 +0000 UTC" firstStartedPulling="2025-11-24 02:07:14.464163221 +0000 UTC m=+3259.150228732" lastFinishedPulling="2025-11-24 02:07:20.524798265 +0000 UTC m=+3265.210863766" observedRunningTime="2025-11-24 02:07:21.58831932 +0000 UTC m=+3266.274384851" watchObservedRunningTime="2025-11-24 02:07:21.590623165 +0000 UTC m=+3266.276688666" Nov 24 02:07:24 crc kubenswrapper[4755]: I1124 02:07:24.404427 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-zx8hf/crc-debug-6z7lx"] Nov 24 02:07:24 crc kubenswrapper[4755]: I1124 02:07:24.410543 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-zx8hf/crc-debug-6z7lx" Nov 24 02:07:24 crc kubenswrapper[4755]: I1124 02:07:24.412490 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-zx8hf"/"default-dockercfg-zhx2v" Nov 24 02:07:24 crc kubenswrapper[4755]: I1124 02:07:24.511944 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1bb6a2c1-41c8-4ddd-8ba1-e688a8f3e524-host\") pod \"crc-debug-6z7lx\" (UID: \"1bb6a2c1-41c8-4ddd-8ba1-e688a8f3e524\") " pod="openshift-must-gather-zx8hf/crc-debug-6z7lx" Nov 24 02:07:24 crc kubenswrapper[4755]: I1124 02:07:24.512245 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xk65d\" (UniqueName: \"kubernetes.io/projected/1bb6a2c1-41c8-4ddd-8ba1-e688a8f3e524-kube-api-access-xk65d\") pod \"crc-debug-6z7lx\" (UID: \"1bb6a2c1-41c8-4ddd-8ba1-e688a8f3e524\") " pod="openshift-must-gather-zx8hf/crc-debug-6z7lx" Nov 24 02:07:24 crc kubenswrapper[4755]: I1124 02:07:24.614580 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1bb6a2c1-41c8-4ddd-8ba1-e688a8f3e524-host\") pod \"crc-debug-6z7lx\" (UID: \"1bb6a2c1-41c8-4ddd-8ba1-e688a8f3e524\") " pod="openshift-must-gather-zx8hf/crc-debug-6z7lx" Nov 24 02:07:24 crc kubenswrapper[4755]: I1124 02:07:24.614936 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xk65d\" (UniqueName: \"kubernetes.io/projected/1bb6a2c1-41c8-4ddd-8ba1-e688a8f3e524-kube-api-access-xk65d\") pod \"crc-debug-6z7lx\" (UID: \"1bb6a2c1-41c8-4ddd-8ba1-e688a8f3e524\") " pod="openshift-must-gather-zx8hf/crc-debug-6z7lx" Nov 24 02:07:24 crc kubenswrapper[4755]: I1124 02:07:24.614770 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1bb6a2c1-41c8-4ddd-8ba1-e688a8f3e524-host\") pod \"crc-debug-6z7lx\" (UID: \"1bb6a2c1-41c8-4ddd-8ba1-e688a8f3e524\") " pod="openshift-must-gather-zx8hf/crc-debug-6z7lx" Nov 24 02:07:24 crc kubenswrapper[4755]: I1124 02:07:24.638274 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xk65d\" (UniqueName: \"kubernetes.io/projected/1bb6a2c1-41c8-4ddd-8ba1-e688a8f3e524-kube-api-access-xk65d\") pod \"crc-debug-6z7lx\" (UID: \"1bb6a2c1-41c8-4ddd-8ba1-e688a8f3e524\") " pod="openshift-must-gather-zx8hf/crc-debug-6z7lx" Nov 24 02:07:24 crc kubenswrapper[4755]: I1124 02:07:24.730744 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-zx8hf/crc-debug-6z7lx" Nov 24 02:07:24 crc kubenswrapper[4755]: W1124 02:07:24.776912 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1bb6a2c1_41c8_4ddd_8ba1_e688a8f3e524.slice/crio-f40d9f1ea9754acc1629e88c4abc8655e60524b790df38503b2165bfcfa5ee7e WatchSource:0}: Error finding container f40d9f1ea9754acc1629e88c4abc8655e60524b790df38503b2165bfcfa5ee7e: Status 404 returned error can't find the container with id f40d9f1ea9754acc1629e88c4abc8655e60524b790df38503b2165bfcfa5ee7e Nov 24 02:07:25 crc kubenswrapper[4755]: I1124 02:07:25.613348 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zx8hf/crc-debug-6z7lx" event={"ID":"1bb6a2c1-41c8-4ddd-8ba1-e688a8f3e524","Type":"ContainerStarted","Data":"f40d9f1ea9754acc1629e88c4abc8655e60524b790df38503b2165bfcfa5ee7e"} Nov 24 02:07:36 crc kubenswrapper[4755]: I1124 02:07:36.718774 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zx8hf/crc-debug-6z7lx" event={"ID":"1bb6a2c1-41c8-4ddd-8ba1-e688a8f3e524","Type":"ContainerStarted","Data":"8840b630173cd527e62eaad2c424a094679a96bf7c9f2489cd2cf5f37cfb7aca"} Nov 24 02:07:36 crc kubenswrapper[4755]: I1124 02:07:36.738473 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-zx8hf/crc-debug-6z7lx" podStartSLOduration=1.085289958 podStartE2EDuration="12.738456295s" podCreationTimestamp="2025-11-24 02:07:24 +0000 UTC" firstStartedPulling="2025-11-24 02:07:24.779246956 +0000 UTC m=+3269.465312457" lastFinishedPulling="2025-11-24 02:07:36.432413283 +0000 UTC m=+3281.118478794" observedRunningTime="2025-11-24 02:07:36.732585259 +0000 UTC m=+3281.418650760" watchObservedRunningTime="2025-11-24 02:07:36.738456295 +0000 UTC m=+3281.424521796" Nov 24 02:08:18 crc kubenswrapper[4755]: I1124 02:08:18.092030 4755 generic.go:334] "Generic (PLEG): container finished" podID="1bb6a2c1-41c8-4ddd-8ba1-e688a8f3e524" containerID="8840b630173cd527e62eaad2c424a094679a96bf7c9f2489cd2cf5f37cfb7aca" exitCode=0 Nov 24 02:08:18 crc kubenswrapper[4755]: I1124 02:08:18.092121 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zx8hf/crc-debug-6z7lx" event={"ID":"1bb6a2c1-41c8-4ddd-8ba1-e688a8f3e524","Type":"ContainerDied","Data":"8840b630173cd527e62eaad2c424a094679a96bf7c9f2489cd2cf5f37cfb7aca"} Nov 24 02:08:19 crc kubenswrapper[4755]: I1124 02:08:19.225228 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-zx8hf/crc-debug-6z7lx" Nov 24 02:08:19 crc kubenswrapper[4755]: I1124 02:08:19.256199 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-zx8hf/crc-debug-6z7lx"] Nov 24 02:08:19 crc kubenswrapper[4755]: I1124 02:08:19.263549 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-zx8hf/crc-debug-6z7lx"] Nov 24 02:08:19 crc kubenswrapper[4755]: I1124 02:08:19.353307 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xk65d\" (UniqueName: \"kubernetes.io/projected/1bb6a2c1-41c8-4ddd-8ba1-e688a8f3e524-kube-api-access-xk65d\") pod \"1bb6a2c1-41c8-4ddd-8ba1-e688a8f3e524\" (UID: \"1bb6a2c1-41c8-4ddd-8ba1-e688a8f3e524\") " Nov 24 02:08:19 crc kubenswrapper[4755]: I1124 02:08:19.353435 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1bb6a2c1-41c8-4ddd-8ba1-e688a8f3e524-host\") pod \"1bb6a2c1-41c8-4ddd-8ba1-e688a8f3e524\" (UID: \"1bb6a2c1-41c8-4ddd-8ba1-e688a8f3e524\") " Nov 24 02:08:19 crc kubenswrapper[4755]: I1124 02:08:19.353918 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1bb6a2c1-41c8-4ddd-8ba1-e688a8f3e524-host" (OuterVolumeSpecName: "host") pod "1bb6a2c1-41c8-4ddd-8ba1-e688a8f3e524" (UID: "1bb6a2c1-41c8-4ddd-8ba1-e688a8f3e524"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 02:08:19 crc kubenswrapper[4755]: I1124 02:08:19.359477 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bb6a2c1-41c8-4ddd-8ba1-e688a8f3e524-kube-api-access-xk65d" (OuterVolumeSpecName: "kube-api-access-xk65d") pod "1bb6a2c1-41c8-4ddd-8ba1-e688a8f3e524" (UID: "1bb6a2c1-41c8-4ddd-8ba1-e688a8f3e524"). InnerVolumeSpecName "kube-api-access-xk65d". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 02:08:19 crc kubenswrapper[4755]: I1124 02:08:19.456193 4755 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1bb6a2c1-41c8-4ddd-8ba1-e688a8f3e524-host\") on node \"crc\" DevicePath \"\"" Nov 24 02:08:19 crc kubenswrapper[4755]: I1124 02:08:19.456228 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xk65d\" (UniqueName: \"kubernetes.io/projected/1bb6a2c1-41c8-4ddd-8ba1-e688a8f3e524-kube-api-access-xk65d\") on node \"crc\" DevicePath \"\"" Nov 24 02:08:20 crc kubenswrapper[4755]: I1124 02:08:20.006962 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bb6a2c1-41c8-4ddd-8ba1-e688a8f3e524" path="/var/lib/kubelet/pods/1bb6a2c1-41c8-4ddd-8ba1-e688a8f3e524/volumes" Nov 24 02:08:20 crc kubenswrapper[4755]: I1124 02:08:20.111629 4755 scope.go:117] "RemoveContainer" containerID="8840b630173cd527e62eaad2c424a094679a96bf7c9f2489cd2cf5f37cfb7aca" Nov 24 02:08:20 crc kubenswrapper[4755]: I1124 02:08:20.112048 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-zx8hf/crc-debug-6z7lx" Nov 24 02:08:20 crc kubenswrapper[4755]: I1124 02:08:20.411298 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-zx8hf/crc-debug-mnh2v"] Nov 24 02:08:20 crc kubenswrapper[4755]: E1124 02:08:20.411713 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1bb6a2c1-41c8-4ddd-8ba1-e688a8f3e524" containerName="container-00" Nov 24 02:08:20 crc kubenswrapper[4755]: I1124 02:08:20.411728 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="1bb6a2c1-41c8-4ddd-8ba1-e688a8f3e524" containerName="container-00" Nov 24 02:08:20 crc kubenswrapper[4755]: I1124 02:08:20.411964 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="1bb6a2c1-41c8-4ddd-8ba1-e688a8f3e524" containerName="container-00" Nov 24 02:08:20 crc kubenswrapper[4755]: I1124 02:08:20.412680 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zx8hf/crc-debug-mnh2v" Nov 24 02:08:20 crc kubenswrapper[4755]: I1124 02:08:20.414699 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-zx8hf"/"default-dockercfg-zhx2v" Nov 24 02:08:20 crc kubenswrapper[4755]: I1124 02:08:20.492217 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9b9pk\" (UniqueName: \"kubernetes.io/projected/413d0e70-bd80-4015-93ee-4c3a4b791f66-kube-api-access-9b9pk\") pod \"crc-debug-mnh2v\" (UID: \"413d0e70-bd80-4015-93ee-4c3a4b791f66\") " pod="openshift-must-gather-zx8hf/crc-debug-mnh2v" Nov 24 02:08:20 crc kubenswrapper[4755]: I1124 02:08:20.492276 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/413d0e70-bd80-4015-93ee-4c3a4b791f66-host\") pod \"crc-debug-mnh2v\" (UID: \"413d0e70-bd80-4015-93ee-4c3a4b791f66\") " pod="openshift-must-gather-zx8hf/crc-debug-mnh2v" Nov 24 02:08:20 crc kubenswrapper[4755]: I1124 02:08:20.594475 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9b9pk\" (UniqueName: \"kubernetes.io/projected/413d0e70-bd80-4015-93ee-4c3a4b791f66-kube-api-access-9b9pk\") pod \"crc-debug-mnh2v\" (UID: \"413d0e70-bd80-4015-93ee-4c3a4b791f66\") " pod="openshift-must-gather-zx8hf/crc-debug-mnh2v" Nov 24 02:08:20 crc kubenswrapper[4755]: I1124 02:08:20.594890 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/413d0e70-bd80-4015-93ee-4c3a4b791f66-host\") pod \"crc-debug-mnh2v\" (UID: \"413d0e70-bd80-4015-93ee-4c3a4b791f66\") " pod="openshift-must-gather-zx8hf/crc-debug-mnh2v" Nov 24 02:08:20 crc kubenswrapper[4755]: I1124 02:08:20.595029 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/413d0e70-bd80-4015-93ee-4c3a4b791f66-host\") pod \"crc-debug-mnh2v\" (UID: \"413d0e70-bd80-4015-93ee-4c3a4b791f66\") " pod="openshift-must-gather-zx8hf/crc-debug-mnh2v" Nov 24 02:08:20 crc kubenswrapper[4755]: I1124 02:08:20.620126 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9b9pk\" (UniqueName: \"kubernetes.io/projected/413d0e70-bd80-4015-93ee-4c3a4b791f66-kube-api-access-9b9pk\") pod \"crc-debug-mnh2v\" (UID: \"413d0e70-bd80-4015-93ee-4c3a4b791f66\") " pod="openshift-must-gather-zx8hf/crc-debug-mnh2v" Nov 24 02:08:20 crc kubenswrapper[4755]: I1124 
02:08:20.730008 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zx8hf/crc-debug-mnh2v" Nov 24 02:08:21 crc kubenswrapper[4755]: I1124 02:08:21.122453 4755 generic.go:334] "Generic (PLEG): container finished" podID="413d0e70-bd80-4015-93ee-4c3a4b791f66" containerID="30a70f3de4e4455778304ce1f185d56a8f76555acd9a4e0937c7d8c2839b4e92" exitCode=0 Nov 24 02:08:21 crc kubenswrapper[4755]: I1124 02:08:21.122626 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zx8hf/crc-debug-mnh2v" event={"ID":"413d0e70-bd80-4015-93ee-4c3a4b791f66","Type":"ContainerDied","Data":"30a70f3de4e4455778304ce1f185d56a8f76555acd9a4e0937c7d8c2839b4e92"} Nov 24 02:08:21 crc kubenswrapper[4755]: I1124 02:08:21.122938 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zx8hf/crc-debug-mnh2v" event={"ID":"413d0e70-bd80-4015-93ee-4c3a4b791f66","Type":"ContainerStarted","Data":"b96b2da095968e0a9abc0bd9531c506964f550d52c6b66e59c9eedef8bb3aec2"} Nov 24 02:08:21 crc kubenswrapper[4755]: I1124 02:08:21.586073 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-zx8hf/crc-debug-mnh2v"] Nov 24 02:08:21 crc kubenswrapper[4755]: I1124 02:08:21.595336 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-zx8hf/crc-debug-mnh2v"] Nov 24 02:08:22 crc kubenswrapper[4755]: I1124 02:08:22.252383 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zx8hf/crc-debug-mnh2v" Nov 24 02:08:22 crc kubenswrapper[4755]: I1124 02:08:22.333522 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9b9pk\" (UniqueName: \"kubernetes.io/projected/413d0e70-bd80-4015-93ee-4c3a4b791f66-kube-api-access-9b9pk\") pod \"413d0e70-bd80-4015-93ee-4c3a4b791f66\" (UID: \"413d0e70-bd80-4015-93ee-4c3a4b791f66\") " Nov 24 02:08:22 crc kubenswrapper[4755]: I1124 02:08:22.333664 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/413d0e70-bd80-4015-93ee-4c3a4b791f66-host\") pod \"413d0e70-bd80-4015-93ee-4c3a4b791f66\" (UID: \"413d0e70-bd80-4015-93ee-4c3a4b791f66\") " Nov 24 02:08:22 crc kubenswrapper[4755]: I1124 02:08:22.333721 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/413d0e70-bd80-4015-93ee-4c3a4b791f66-host" (OuterVolumeSpecName: "host") pod "413d0e70-bd80-4015-93ee-4c3a4b791f66" (UID: "413d0e70-bd80-4015-93ee-4c3a4b791f66"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 02:08:22 crc kubenswrapper[4755]: I1124 02:08:22.334394 4755 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/413d0e70-bd80-4015-93ee-4c3a4b791f66-host\") on node \"crc\" DevicePath \"\"" Nov 24 02:08:22 crc kubenswrapper[4755]: I1124 02:08:22.342271 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/413d0e70-bd80-4015-93ee-4c3a4b791f66-kube-api-access-9b9pk" (OuterVolumeSpecName: "kube-api-access-9b9pk") pod "413d0e70-bd80-4015-93ee-4c3a4b791f66" (UID: "413d0e70-bd80-4015-93ee-4c3a4b791f66"). InnerVolumeSpecName "kube-api-access-9b9pk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 02:08:22 crc kubenswrapper[4755]: I1124 02:08:22.437347 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9b9pk\" (UniqueName: \"kubernetes.io/projected/413d0e70-bd80-4015-93ee-4c3a4b791f66-kube-api-access-9b9pk\") on node \"crc\" DevicePath \"\"" Nov 24 02:08:23 crc kubenswrapper[4755]: I1124 02:08:23.018329 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-zx8hf/crc-debug-7fblw"] Nov 24 02:08:23 crc kubenswrapper[4755]: E1124 02:08:23.019556 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="413d0e70-bd80-4015-93ee-4c3a4b791f66" containerName="container-00" Nov 24 02:08:23 crc kubenswrapper[4755]: I1124 02:08:23.019581 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="413d0e70-bd80-4015-93ee-4c3a4b791f66" containerName="container-00" Nov 24 02:08:23 crc kubenswrapper[4755]: I1124 02:08:23.019822 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="413d0e70-bd80-4015-93ee-4c3a4b791f66" containerName="container-00" Nov 24 02:08:23 crc kubenswrapper[4755]: I1124 02:08:23.020434 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zx8hf/crc-debug-7fblw" Nov 24 02:08:23 crc kubenswrapper[4755]: I1124 02:08:23.143536 4755 scope.go:117] "RemoveContainer" containerID="30a70f3de4e4455778304ce1f185d56a8f76555acd9a4e0937c7d8c2839b4e92" Nov 24 02:08:23 crc kubenswrapper[4755]: I1124 02:08:23.143580 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zx8hf/crc-debug-mnh2v" Nov 24 02:08:23 crc kubenswrapper[4755]: I1124 02:08:23.152223 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dsbn5\" (UniqueName: \"kubernetes.io/projected/0eb68a0d-4b96-4a41-989c-f635090a79e1-kube-api-access-dsbn5\") pod \"crc-debug-7fblw\" (UID: \"0eb68a0d-4b96-4a41-989c-f635090a79e1\") " pod="openshift-must-gather-zx8hf/crc-debug-7fblw" Nov 24 02:08:23 crc kubenswrapper[4755]: I1124 02:08:23.152317 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0eb68a0d-4b96-4a41-989c-f635090a79e1-host\") pod \"crc-debug-7fblw\" (UID: \"0eb68a0d-4b96-4a41-989c-f635090a79e1\") " pod="openshift-must-gather-zx8hf/crc-debug-7fblw" Nov 24 02:08:23 crc kubenswrapper[4755]: I1124 02:08:23.253991 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0eb68a0d-4b96-4a41-989c-f635090a79e1-host\") pod \"crc-debug-7fblw\" (UID: \"0eb68a0d-4b96-4a41-989c-f635090a79e1\") " pod="openshift-must-gather-zx8hf/crc-debug-7fblw" Nov 24 02:08:23 crc kubenswrapper[4755]: I1124 02:08:23.254092 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0eb68a0d-4b96-4a41-989c-f635090a79e1-host\") pod \"crc-debug-7fblw\" (UID: \"0eb68a0d-4b96-4a41-989c-f635090a79e1\") " pod="openshift-must-gather-zx8hf/crc-debug-7fblw" Nov 24 02:08:23 crc kubenswrapper[4755]: I1124 02:08:23.254316 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dsbn5\" (UniqueName: \"kubernetes.io/projected/0eb68a0d-4b96-4a41-989c-f635090a79e1-kube-api-access-dsbn5\") pod \"crc-debug-7fblw\" (UID: \"0eb68a0d-4b96-4a41-989c-f635090a79e1\") " 
pod="openshift-must-gather-zx8hf/crc-debug-7fblw" Nov 24 02:08:23 crc kubenswrapper[4755]: I1124 02:08:23.270848 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dsbn5\" (UniqueName: \"kubernetes.io/projected/0eb68a0d-4b96-4a41-989c-f635090a79e1-kube-api-access-dsbn5\") pod \"crc-debug-7fblw\" (UID: \"0eb68a0d-4b96-4a41-989c-f635090a79e1\") " pod="openshift-must-gather-zx8hf/crc-debug-7fblw" Nov 24 02:08:23 crc kubenswrapper[4755]: I1124 02:08:23.341136 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zx8hf/crc-debug-7fblw" Nov 24 02:08:23 crc kubenswrapper[4755]: W1124 02:08:23.365116 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0eb68a0d_4b96_4a41_989c_f635090a79e1.slice/crio-450d5d3f42d6eed96529e3cbd9dcf3e036510ddff75759662d3598d947f9334e WatchSource:0}: Error finding container 450d5d3f42d6eed96529e3cbd9dcf3e036510ddff75759662d3598d947f9334e: Status 404 returned error can't find the container with id 450d5d3f42d6eed96529e3cbd9dcf3e036510ddff75759662d3598d947f9334e Nov 24 02:08:24 crc kubenswrapper[4755]: I1124 02:08:24.010581 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="413d0e70-bd80-4015-93ee-4c3a4b791f66" path="/var/lib/kubelet/pods/413d0e70-bd80-4015-93ee-4c3a4b791f66/volumes" Nov 24 02:08:24 crc kubenswrapper[4755]: I1124 02:08:24.154321 4755 generic.go:334] "Generic (PLEG): container finished" podID="0eb68a0d-4b96-4a41-989c-f635090a79e1" containerID="91fb2b5facd6d9ecac922b7f388b8ef35e3a9cf66accb1083c8051971aa04d25" exitCode=0 Nov 24 02:08:24 crc kubenswrapper[4755]: I1124 02:08:24.154356 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zx8hf/crc-debug-7fblw" event={"ID":"0eb68a0d-4b96-4a41-989c-f635090a79e1","Type":"ContainerDied","Data":"91fb2b5facd6d9ecac922b7f388b8ef35e3a9cf66accb1083c8051971aa04d25"} Nov 24 02:08:24 crc kubenswrapper[4755]: I1124 02:08:24.154377 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zx8hf/crc-debug-7fblw" event={"ID":"0eb68a0d-4b96-4a41-989c-f635090a79e1","Type":"ContainerStarted","Data":"450d5d3f42d6eed96529e3cbd9dcf3e036510ddff75759662d3598d947f9334e"} Nov 24 02:08:24 crc kubenswrapper[4755]: I1124 02:08:24.185592 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-zx8hf/crc-debug-7fblw"] Nov 24 02:08:24 crc kubenswrapper[4755]: I1124 02:08:24.192703 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-zx8hf/crc-debug-7fblw"] Nov 24 02:08:25 crc kubenswrapper[4755]: I1124 02:08:25.271050 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-zx8hf/crc-debug-7fblw" Nov 24 02:08:25 crc kubenswrapper[4755]: I1124 02:08:25.419045 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dsbn5\" (UniqueName: \"kubernetes.io/projected/0eb68a0d-4b96-4a41-989c-f635090a79e1-kube-api-access-dsbn5\") pod \"0eb68a0d-4b96-4a41-989c-f635090a79e1\" (UID: \"0eb68a0d-4b96-4a41-989c-f635090a79e1\") " Nov 24 02:08:25 crc kubenswrapper[4755]: I1124 02:08:25.419191 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0eb68a0d-4b96-4a41-989c-f635090a79e1-host\") pod \"0eb68a0d-4b96-4a41-989c-f635090a79e1\" (UID: \"0eb68a0d-4b96-4a41-989c-f635090a79e1\") " Nov 24 02:08:25 crc kubenswrapper[4755]: I1124 02:08:25.419230 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0eb68a0d-4b96-4a41-989c-f635090a79e1-host" (OuterVolumeSpecName: "host") pod "0eb68a0d-4b96-4a41-989c-f635090a79e1" (UID: "0eb68a0d-4b96-4a41-989c-f635090a79e1"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 02:08:25 crc kubenswrapper[4755]: I1124 02:08:25.419598 4755 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0eb68a0d-4b96-4a41-989c-f635090a79e1-host\") on node \"crc\" DevicePath \"\"" Nov 24 02:08:25 crc kubenswrapper[4755]: I1124 02:08:25.423558 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0eb68a0d-4b96-4a41-989c-f635090a79e1-kube-api-access-dsbn5" (OuterVolumeSpecName: "kube-api-access-dsbn5") pod "0eb68a0d-4b96-4a41-989c-f635090a79e1" (UID: "0eb68a0d-4b96-4a41-989c-f635090a79e1"). InnerVolumeSpecName "kube-api-access-dsbn5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 02:08:25 crc kubenswrapper[4755]: I1124 02:08:25.521572 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dsbn5\" (UniqueName: \"kubernetes.io/projected/0eb68a0d-4b96-4a41-989c-f635090a79e1-kube-api-access-dsbn5\") on node \"crc\" DevicePath \"\"" Nov 24 02:08:25 crc kubenswrapper[4755]: I1124 02:08:25.870043 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-tvqw7"] Nov 24 02:08:25 crc kubenswrapper[4755]: E1124 02:08:25.871073 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0eb68a0d-4b96-4a41-989c-f635090a79e1" containerName="container-00" Nov 24 02:08:25 crc kubenswrapper[4755]: I1124 02:08:25.871098 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="0eb68a0d-4b96-4a41-989c-f635090a79e1" containerName="container-00" Nov 24 02:08:25 crc kubenswrapper[4755]: I1124 02:08:25.871360 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="0eb68a0d-4b96-4a41-989c-f635090a79e1" containerName="container-00" Nov 24 02:08:25 crc kubenswrapper[4755]: I1124 02:08:25.873130 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tvqw7" Nov 24 02:08:25 crc kubenswrapper[4755]: I1124 02:08:25.880635 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tvqw7"] Nov 24 02:08:26 crc kubenswrapper[4755]: I1124 02:08:26.018186 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0eb68a0d-4b96-4a41-989c-f635090a79e1" path="/var/lib/kubelet/pods/0eb68a0d-4b96-4a41-989c-f635090a79e1/volumes" Nov 24 02:08:26 crc kubenswrapper[4755]: I1124 02:08:26.034208 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f7s6x\" (UniqueName: \"kubernetes.io/projected/9d93c119-37ee-4cf6-a01e-67c40d43f9ce-kube-api-access-f7s6x\") pod \"redhat-operators-tvqw7\" (UID: \"9d93c119-37ee-4cf6-a01e-67c40d43f9ce\") " pod="openshift-marketplace/redhat-operators-tvqw7" Nov 24 02:08:26 crc kubenswrapper[4755]: I1124 02:08:26.034269 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9d93c119-37ee-4cf6-a01e-67c40d43f9ce-catalog-content\") pod \"redhat-operators-tvqw7\" (UID: \"9d93c119-37ee-4cf6-a01e-67c40d43f9ce\") " pod="openshift-marketplace/redhat-operators-tvqw7" Nov 24 02:08:26 crc kubenswrapper[4755]: I1124 02:08:26.034353 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9d93c119-37ee-4cf6-a01e-67c40d43f9ce-utilities\") pod \"redhat-operators-tvqw7\" (UID: \"9d93c119-37ee-4cf6-a01e-67c40d43f9ce\") " pod="openshift-marketplace/redhat-operators-tvqw7" Nov 24 02:08:26 crc kubenswrapper[4755]: I1124 02:08:26.135912 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9d93c119-37ee-4cf6-a01e-67c40d43f9ce-utilities\") pod \"redhat-operators-tvqw7\" (UID: \"9d93c119-37ee-4cf6-a01e-67c40d43f9ce\") " pod="openshift-marketplace/redhat-operators-tvqw7" Nov 24 02:08:26 crc kubenswrapper[4755]: I1124 02:08:26.136118 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f7s6x\" (UniqueName: \"kubernetes.io/projected/9d93c119-37ee-4cf6-a01e-67c40d43f9ce-kube-api-access-f7s6x\") pod \"redhat-operators-tvqw7\" (UID: \"9d93c119-37ee-4cf6-a01e-67c40d43f9ce\") " pod="openshift-marketplace/redhat-operators-tvqw7" Nov 24 02:08:26 crc kubenswrapper[4755]: I1124 02:08:26.136152 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9d93c119-37ee-4cf6-a01e-67c40d43f9ce-catalog-content\") pod \"redhat-operators-tvqw7\" (UID: \"9d93c119-37ee-4cf6-a01e-67c40d43f9ce\") " pod="openshift-marketplace/redhat-operators-tvqw7" Nov 24 02:08:26 crc kubenswrapper[4755]: I1124 02:08:26.136415 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9d93c119-37ee-4cf6-a01e-67c40d43f9ce-utilities\") pod \"redhat-operators-tvqw7\" (UID: \"9d93c119-37ee-4cf6-a01e-67c40d43f9ce\") " pod="openshift-marketplace/redhat-operators-tvqw7" Nov 24 02:08:26 crc kubenswrapper[4755]: I1124 02:08:26.136953 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9d93c119-37ee-4cf6-a01e-67c40d43f9ce-catalog-content\") pod \"redhat-operators-tvqw7\" 
(UID: \"9d93c119-37ee-4cf6-a01e-67c40d43f9ce\") " pod="openshift-marketplace/redhat-operators-tvqw7" Nov 24 02:08:26 crc kubenswrapper[4755]: I1124 02:08:26.161877 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f7s6x\" (UniqueName: \"kubernetes.io/projected/9d93c119-37ee-4cf6-a01e-67c40d43f9ce-kube-api-access-f7s6x\") pod \"redhat-operators-tvqw7\" (UID: \"9d93c119-37ee-4cf6-a01e-67c40d43f9ce\") " pod="openshift-marketplace/redhat-operators-tvqw7" Nov 24 02:08:26 crc kubenswrapper[4755]: I1124 02:08:26.175326 4755 scope.go:117] "RemoveContainer" containerID="91fb2b5facd6d9ecac922b7f388b8ef35e3a9cf66accb1083c8051971aa04d25" Nov 24 02:08:26 crc kubenswrapper[4755]: I1124 02:08:26.175364 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zx8hf/crc-debug-7fblw" Nov 24 02:08:26 crc kubenswrapper[4755]: I1124 02:08:26.194079 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tvqw7" Nov 24 02:08:26 crc kubenswrapper[4755]: I1124 02:08:26.681941 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tvqw7"] Nov 24 02:08:26 crc kubenswrapper[4755]: W1124 02:08:26.682590 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d93c119_37ee_4cf6_a01e_67c40d43f9ce.slice/crio-62efe8282071bf85e66302b67eccf2283d6d11f47b0660aba075e9261d6a0c44 WatchSource:0}: Error finding container 62efe8282071bf85e66302b67eccf2283d6d11f47b0660aba075e9261d6a0c44: Status 404 returned error can't find the container with id 62efe8282071bf85e66302b67eccf2283d6d11f47b0660aba075e9261d6a0c44 Nov 24 02:08:27 crc kubenswrapper[4755]: I1124 02:08:27.186371 4755 generic.go:334] "Generic (PLEG): container finished" podID="9d93c119-37ee-4cf6-a01e-67c40d43f9ce" containerID="2db14d08342c460b61758b98bd226d57650f55326a1dbe7929093f32eab246a6" exitCode=0 Nov 24 02:08:27 crc kubenswrapper[4755]: I1124 02:08:27.186489 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tvqw7" event={"ID":"9d93c119-37ee-4cf6-a01e-67c40d43f9ce","Type":"ContainerDied","Data":"2db14d08342c460b61758b98bd226d57650f55326a1dbe7929093f32eab246a6"} Nov 24 02:08:27 crc kubenswrapper[4755]: I1124 02:08:27.186723 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tvqw7" event={"ID":"9d93c119-37ee-4cf6-a01e-67c40d43f9ce","Type":"ContainerStarted","Data":"62efe8282071bf85e66302b67eccf2283d6d11f47b0660aba075e9261d6a0c44"} Nov 24 02:08:28 crc kubenswrapper[4755]: I1124 02:08:28.200721 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tvqw7" event={"ID":"9d93c119-37ee-4cf6-a01e-67c40d43f9ce","Type":"ContainerStarted","Data":"f810bb70e86989a356dfd87803e2698da1f323c926cc3f3769fb67f0a8a2f5f8"} Nov 24 02:08:29 crc kubenswrapper[4755]: I1124 02:08:29.210543 4755 generic.go:334] "Generic (PLEG): container finished" podID="9d93c119-37ee-4cf6-a01e-67c40d43f9ce" containerID="f810bb70e86989a356dfd87803e2698da1f323c926cc3f3769fb67f0a8a2f5f8" exitCode=0 Nov 24 02:08:29 crc kubenswrapper[4755]: I1124 02:08:29.210696 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tvqw7" 
event={"ID":"9d93c119-37ee-4cf6-a01e-67c40d43f9ce","Type":"ContainerDied","Data":"f810bb70e86989a356dfd87803e2698da1f323c926cc3f3769fb67f0a8a2f5f8"} Nov 24 02:08:31 crc kubenswrapper[4755]: I1124 02:08:31.227689 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tvqw7" event={"ID":"9d93c119-37ee-4cf6-a01e-67c40d43f9ce","Type":"ContainerStarted","Data":"97eb0b5f1325f3d1b9acb35a3961b213a82b4801996fb765fdbed6682e94377e"} Nov 24 02:08:31 crc kubenswrapper[4755]: I1124 02:08:31.246487 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-tvqw7" podStartSLOduration=3.737653065 podStartE2EDuration="6.246456197s" podCreationTimestamp="2025-11-24 02:08:25 +0000 UTC" firstStartedPulling="2025-11-24 02:08:27.187732199 +0000 UTC m=+3331.873797700" lastFinishedPulling="2025-11-24 02:08:29.696535331 +0000 UTC m=+3334.382600832" observedRunningTime="2025-11-24 02:08:31.244991975 +0000 UTC m=+3335.931057496" watchObservedRunningTime="2025-11-24 02:08:31.246456197 +0000 UTC m=+3335.932521698" Nov 24 02:08:36 crc kubenswrapper[4755]: I1124 02:08:36.196409 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-tvqw7" Nov 24 02:08:36 crc kubenswrapper[4755]: I1124 02:08:36.201811 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-tvqw7" Nov 24 02:08:37 crc kubenswrapper[4755]: I1124 02:08:37.243989 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-tvqw7" podUID="9d93c119-37ee-4cf6-a01e-67c40d43f9ce" containerName="registry-server" probeResult="failure" output=< Nov 24 02:08:37 crc kubenswrapper[4755]: timeout: failed to connect service ":50051" within 1s Nov 24 02:08:37 crc kubenswrapper[4755]: > Nov 24 02:08:39 crc kubenswrapper[4755]: I1124 02:08:39.577947 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-64586f69c8-7phjt_f51ca687-cad4-4e48-bb34-1fd95c8bf47d/barbican-api/0.log" Nov 24 02:08:39 crc kubenswrapper[4755]: I1124 02:08:39.755147 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-64586f69c8-7phjt_f51ca687-cad4-4e48-bb34-1fd95c8bf47d/barbican-api-log/0.log" Nov 24 02:08:39 crc kubenswrapper[4755]: I1124 02:08:39.867748 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-86fc6cb5d-5rbfc_b447c315-6a28-4a18-af48-fbcf84cd0c00/barbican-keystone-listener/0.log" Nov 24 02:08:39 crc kubenswrapper[4755]: I1124 02:08:39.877742 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-86fc6cb5d-5rbfc_b447c315-6a28-4a18-af48-fbcf84cd0c00/barbican-keystone-listener-log/0.log" Nov 24 02:08:40 crc kubenswrapper[4755]: I1124 02:08:40.033153 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-545bd7b455-5w47r_4100cddd-df77-4b8a-af0c-746bbd98c80f/barbican-worker/0.log" Nov 24 02:08:40 crc kubenswrapper[4755]: I1124 02:08:40.072950 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-545bd7b455-5w47r_4100cddd-df77-4b8a-af0c-746bbd98c80f/barbican-worker-log/0.log" Nov 24 02:08:40 crc kubenswrapper[4755]: I1124 02:08:40.219802 4755 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-hqnnd_a1f11949-be37-4a9d-9e73-b0cbc20a6d1e/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Nov 24 02:08:40 crc kubenswrapper[4755]: I1124 02:08:40.337716 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_cbaee188-8eb8-461e-ba33-4abbb59c4ef7/ceilometer-notification-agent/0.log" Nov 24 02:08:40 crc kubenswrapper[4755]: I1124 02:08:40.345382 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_cbaee188-8eb8-461e-ba33-4abbb59c4ef7/ceilometer-central-agent/0.log" Nov 24 02:08:40 crc kubenswrapper[4755]: I1124 02:08:40.384670 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_cbaee188-8eb8-461e-ba33-4abbb59c4ef7/proxy-httpd/0.log" Nov 24 02:08:40 crc kubenswrapper[4755]: I1124 02:08:40.505630 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_cbaee188-8eb8-461e-ba33-4abbb59c4ef7/sg-core/0.log" Nov 24 02:08:40 crc kubenswrapper[4755]: I1124 02:08:40.593050 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_a7aabf3f-133b-4bdf-86e6-9fb76e89d076/cinder-api/0.log" Nov 24 02:08:40 crc kubenswrapper[4755]: I1124 02:08:40.607570 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_a7aabf3f-133b-4bdf-86e6-9fb76e89d076/cinder-api-log/0.log" Nov 24 02:08:40 crc kubenswrapper[4755]: I1124 02:08:40.784587 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_0404892f-7ed1-4990-a8b1-960e6531b017/cinder-scheduler/0.log" Nov 24 02:08:40 crc kubenswrapper[4755]: I1124 02:08:40.794441 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_0404892f-7ed1-4990-a8b1-960e6531b017/probe/0.log" Nov 24 02:08:40 crc kubenswrapper[4755]: I1124 02:08:40.940576 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-czxxj_f823da2a-bba1-4b6e-9504-e03ec6a3b94f/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Nov 24 02:08:41 crc kubenswrapper[4755]: I1124 02:08:41.001206 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-xk9rb_4afec439-5744-46c0-a074-88c86ac07fbe/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Nov 24 02:08:41 crc kubenswrapper[4755]: I1124 02:08:41.139343 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-cb6ffcf87-6vs5l_ae6a5980-1b82-42c7-b86c-109e43e389cd/init/0.log" Nov 24 02:08:41 crc kubenswrapper[4755]: I1124 02:08:41.316215 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-cb6ffcf87-6vs5l_ae6a5980-1b82-42c7-b86c-109e43e389cd/init/0.log" Nov 24 02:08:41 crc kubenswrapper[4755]: I1124 02:08:41.364771 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-cb6ffcf87-6vs5l_ae6a5980-1b82-42c7-b86c-109e43e389cd/dnsmasq-dns/0.log" Nov 24 02:08:41 crc kubenswrapper[4755]: I1124 02:08:41.377304 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-fc6r8_c0341d77-5182-4cb4-b4f8-4b3389c7887b/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Nov 24 02:08:41 crc kubenswrapper[4755]: I1124 02:08:41.543562 4755 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_glance-default-external-api-0_d19dab7a-f075-4b26-a45f-1542a445a8a6/glance-log/0.log" Nov 24 02:08:41 crc kubenswrapper[4755]: I1124 02:08:41.564438 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_d19dab7a-f075-4b26-a45f-1542a445a8a6/glance-httpd/0.log" Nov 24 02:08:41 crc kubenswrapper[4755]: I1124 02:08:41.726085 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_3658a90c-83fd-4a8e-9d15-c1b2cac647f1/glance-httpd/0.log" Nov 24 02:08:41 crc kubenswrapper[4755]: I1124 02:08:41.734038 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_3658a90c-83fd-4a8e-9d15-c1b2cac647f1/glance-log/0.log" Nov 24 02:08:41 crc kubenswrapper[4755]: I1124 02:08:41.872051 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-75d8fb7cd4-vbxkn_5d176bdd-fe2f-4ed0-a930-2a6ae568b400/horizon/0.log" Nov 24 02:08:42 crc kubenswrapper[4755]: I1124 02:08:42.008091 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-km2xv_dd7e309d-d807-4897-b8e8-cff4ed2c5ac9/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Nov 24 02:08:42 crc kubenswrapper[4755]: I1124 02:08:42.151318 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-v9bk9_2b4a98ef-0655-4257-be72-766516d54fc4/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Nov 24 02:08:42 crc kubenswrapper[4755]: I1124 02:08:42.263899 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-75d8fb7cd4-vbxkn_5d176bdd-fe2f-4ed0-a930-2a6ae568b400/horizon-log/0.log" Nov 24 02:08:42 crc kubenswrapper[4755]: I1124 02:08:42.465402 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29399161-hl9nq_97f32270-c319-4ef7-9784-1a63c16a0164/keystone-cron/0.log" Nov 24 02:08:42 crc kubenswrapper[4755]: I1124 02:08:42.472033 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-68b755649b-gdjxt_56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f/keystone-api/0.log" Nov 24 02:08:42 crc kubenswrapper[4755]: I1124 02:08:42.628772 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_c8452109-2acc-4f1b-848f-e1b5cb87590d/kube-state-metrics/0.log" Nov 24 02:08:42 crc kubenswrapper[4755]: I1124 02:08:42.683658 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-dwg9g_cff1906b-beb7-4b0f-b20b-c0d155437b90/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Nov 24 02:08:43 crc kubenswrapper[4755]: I1124 02:08:43.017477 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-765f9bdf9-sx8ch_05c3cedf-f9a5-453f-a879-fea1939c9f87/neutron-api/0.log" Nov 24 02:08:43 crc kubenswrapper[4755]: I1124 02:08:43.074514 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-765f9bdf9-sx8ch_05c3cedf-f9a5-453f-a879-fea1939c9f87/neutron-httpd/0.log" Nov 24 02:08:43 crc kubenswrapper[4755]: I1124 02:08:43.258270 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn_0c0128e5-6f6e-4d49-813c-36d2959a8e3e/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Nov 24 02:08:43 crc kubenswrapper[4755]: I1124 02:08:43.856360 4755 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack_nova-api-0_d237edf7-c0f4-4cfd-a1d2-fd6ac3f585b7/nova-api-log/0.log" Nov 24 02:08:43 crc kubenswrapper[4755]: I1124 02:08:43.891840 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_d3b1705c-2c0d-4bd6-b928-87a6a105cb4d/nova-cell0-conductor-conductor/0.log" Nov 24 02:08:44 crc kubenswrapper[4755]: I1124 02:08:44.066458 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_d237edf7-c0f4-4cfd-a1d2-fd6ac3f585b7/nova-api-api/0.log" Nov 24 02:08:44 crc kubenswrapper[4755]: I1124 02:08:44.132683 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_8974724e-99a8-4edd-8637-2767f33d3562/nova-cell1-conductor-conductor/0.log" Nov 24 02:08:44 crc kubenswrapper[4755]: I1124 02:08:44.233756 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_0fbd9862-f65c-4c62-8701-83a23ce4211f/nova-cell1-novncproxy-novncproxy/0.log" Nov 24 02:08:44 crc kubenswrapper[4755]: I1124 02:08:44.417474 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-wx7v6_3f6ff548-9e89-4d7c-8a41-d5c769a8d871/nova-edpm-deployment-openstack-edpm-ipam/0.log" Nov 24 02:08:44 crc kubenswrapper[4755]: I1124 02:08:44.524484 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_6db35847-3127-4ec0-b617-18e9c0f03f8a/nova-metadata-log/0.log" Nov 24 02:08:44 crc kubenswrapper[4755]: I1124 02:08:44.812007 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_c2bb0127-edec-4f95-a79b-b35b3607c968/nova-scheduler-scheduler/0.log" Nov 24 02:08:44 crc kubenswrapper[4755]: I1124 02:08:44.856043 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_0b8f9a57-22fa-4115-942f-e6f7343a78e4/mysql-bootstrap/0.log" Nov 24 02:08:45 crc kubenswrapper[4755]: I1124 02:08:45.059760 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_0b8f9a57-22fa-4115-942f-e6f7343a78e4/mysql-bootstrap/0.log" Nov 24 02:08:45 crc kubenswrapper[4755]: I1124 02:08:45.083320 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_0b8f9a57-22fa-4115-942f-e6f7343a78e4/galera/0.log" Nov 24 02:08:45 crc kubenswrapper[4755]: I1124 02:08:45.226660 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_22215216-efac-4810-90f1-4d42ccc6399c/mysql-bootstrap/0.log" Nov 24 02:08:45 crc kubenswrapper[4755]: I1124 02:08:45.453239 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_22215216-efac-4810-90f1-4d42ccc6399c/mysql-bootstrap/0.log" Nov 24 02:08:45 crc kubenswrapper[4755]: I1124 02:08:45.510833 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_6db35847-3127-4ec0-b617-18e9c0f03f8a/nova-metadata-metadata/0.log" Nov 24 02:08:45 crc kubenswrapper[4755]: I1124 02:08:45.513641 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_22215216-efac-4810-90f1-4d42ccc6399c/galera/0.log" Nov 24 02:08:45 crc kubenswrapper[4755]: I1124 02:08:45.628199 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_1c6ba259-f60e-4b3f-b901-e42aaff73569/openstackclient/0.log" Nov 24 02:08:45 crc kubenswrapper[4755]: I1124 02:08:45.730929 4755 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack_ovn-controller-7tzgl_06d5bee3-77f6-4f08-bfc6-6c9cf6f0bdc0/ovn-controller/0.log" Nov 24 02:08:45 crc kubenswrapper[4755]: I1124 02:08:45.903539 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-8kczp_509f3153-a59d-4614-a753-8cd8df81734c/openstack-network-exporter/0.log" Nov 24 02:08:45 crc kubenswrapper[4755]: I1124 02:08:45.992484 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-khjj5_3e8faee1-2ae1-4f03-9379-d35e533f222d/ovsdb-server-init/0.log" Nov 24 02:08:46 crc kubenswrapper[4755]: I1124 02:08:46.167696 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-khjj5_3e8faee1-2ae1-4f03-9379-d35e533f222d/ovs-vswitchd/0.log" Nov 24 02:08:46 crc kubenswrapper[4755]: I1124 02:08:46.175366 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-khjj5_3e8faee1-2ae1-4f03-9379-d35e533f222d/ovsdb-server-init/0.log" Nov 24 02:08:46 crc kubenswrapper[4755]: I1124 02:08:46.232827 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-khjj5_3e8faee1-2ae1-4f03-9379-d35e533f222d/ovsdb-server/0.log" Nov 24 02:08:46 crc kubenswrapper[4755]: I1124 02:08:46.245045 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-tvqw7" Nov 24 02:08:46 crc kubenswrapper[4755]: I1124 02:08:46.302056 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-tvqw7" Nov 24 02:08:46 crc kubenswrapper[4755]: I1124 02:08:46.434498 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-bjt8c_0227ae83-e6f1-477d-b3b0-12cfcd8ae318/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Nov 24 02:08:46 crc kubenswrapper[4755]: I1124 02:08:46.439282 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_7c5a9efb-a4cf-4485-b3bd-972318209141/openstack-network-exporter/0.log" Nov 24 02:08:46 crc kubenswrapper[4755]: I1124 02:08:46.474648 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_7c5a9efb-a4cf-4485-b3bd-972318209141/ovn-northd/0.log" Nov 24 02:08:46 crc kubenswrapper[4755]: I1124 02:08:46.486785 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tvqw7"] Nov 24 02:08:46 crc kubenswrapper[4755]: I1124 02:08:46.682832 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_9b8b513e-db84-49e7-88e5-b023b20bd604/openstack-network-exporter/0.log" Nov 24 02:08:46 crc kubenswrapper[4755]: I1124 02:08:46.738332 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_9b8b513e-db84-49e7-88e5-b023b20bd604/ovsdbserver-nb/0.log" Nov 24 02:08:46 crc kubenswrapper[4755]: I1124 02:08:46.879053 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_1fdb8eaf-1302-4fff-a38f-673a89890e64/openstack-network-exporter/0.log" Nov 24 02:08:46 crc kubenswrapper[4755]: I1124 02:08:46.883431 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_1fdb8eaf-1302-4fff-a38f-673a89890e64/ovsdbserver-sb/0.log" Nov 24 02:08:46 crc kubenswrapper[4755]: I1124 02:08:46.979168 4755 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_placement-8674657456-64797_521180f5-3721-4b4d-8359-e7b69268a36a/placement-api/0.log" Nov 24 02:08:47 crc kubenswrapper[4755]: I1124 02:08:47.132084 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-8674657456-64797_521180f5-3721-4b4d-8359-e7b69268a36a/placement-log/0.log" Nov 24 02:08:47 crc kubenswrapper[4755]: I1124 02:08:47.262079 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_ba8cbfc3-74a5-4ea6-bd18-8fcab5462623/setup-container/0.log" Nov 24 02:08:47 crc kubenswrapper[4755]: I1124 02:08:47.379726 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-tvqw7" podUID="9d93c119-37ee-4cf6-a01e-67c40d43f9ce" containerName="registry-server" containerID="cri-o://97eb0b5f1325f3d1b9acb35a3961b213a82b4801996fb765fdbed6682e94377e" gracePeriod=2 Nov 24 02:08:47 crc kubenswrapper[4755]: I1124 02:08:47.437290 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_ba8cbfc3-74a5-4ea6-bd18-8fcab5462623/setup-container/0.log" Nov 24 02:08:47 crc kubenswrapper[4755]: I1124 02:08:47.458154 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_ba8cbfc3-74a5-4ea6-bd18-8fcab5462623/rabbitmq/0.log" Nov 24 02:08:47 crc kubenswrapper[4755]: I1124 02:08:47.616970 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_47ce72c9-6a27-44ff-80ed-d844fa0f1d2e/setup-container/0.log" Nov 24 02:08:47 crc kubenswrapper[4755]: I1124 02:08:47.830613 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_47ce72c9-6a27-44ff-80ed-d844fa0f1d2e/setup-container/0.log" Nov 24 02:08:47 crc kubenswrapper[4755]: I1124 02:08:47.893411 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-r6vw9_b2e96444-0ef2-436c-9641-b980fd1961d6/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Nov 24 02:08:47 crc kubenswrapper[4755]: I1124 02:08:47.898318 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_47ce72c9-6a27-44ff-80ed-d844fa0f1d2e/rabbitmq/0.log" Nov 24 02:08:47 crc kubenswrapper[4755]: I1124 02:08:47.970570 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tvqw7" Nov 24 02:08:48 crc kubenswrapper[4755]: I1124 02:08:48.104145 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-m85m8_ff52144e-9604-44f1-9af6-65f8c9928560/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Nov 24 02:08:48 crc kubenswrapper[4755]: I1124 02:08:48.140376 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9d93c119-37ee-4cf6-a01e-67c40d43f9ce-catalog-content\") pod \"9d93c119-37ee-4cf6-a01e-67c40d43f9ce\" (UID: \"9d93c119-37ee-4cf6-a01e-67c40d43f9ce\") " Nov 24 02:08:48 crc kubenswrapper[4755]: I1124 02:08:48.140421 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9d93c119-37ee-4cf6-a01e-67c40d43f9ce-utilities\") pod \"9d93c119-37ee-4cf6-a01e-67c40d43f9ce\" (UID: \"9d93c119-37ee-4cf6-a01e-67c40d43f9ce\") " Nov 24 02:08:48 crc kubenswrapper[4755]: I1124 02:08:48.140533 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f7s6x\" (UniqueName: \"kubernetes.io/projected/9d93c119-37ee-4cf6-a01e-67c40d43f9ce-kube-api-access-f7s6x\") pod \"9d93c119-37ee-4cf6-a01e-67c40d43f9ce\" (UID: \"9d93c119-37ee-4cf6-a01e-67c40d43f9ce\") " Nov 24 02:08:48 crc kubenswrapper[4755]: I1124 02:08:48.140964 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9d93c119-37ee-4cf6-a01e-67c40d43f9ce-utilities" (OuterVolumeSpecName: "utilities") pod "9d93c119-37ee-4cf6-a01e-67c40d43f9ce" (UID: "9d93c119-37ee-4cf6-a01e-67c40d43f9ce"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 02:08:48 crc kubenswrapper[4755]: I1124 02:08:48.141114 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9d93c119-37ee-4cf6-a01e-67c40d43f9ce-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 02:08:48 crc kubenswrapper[4755]: I1124 02:08:48.155991 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d93c119-37ee-4cf6-a01e-67c40d43f9ce-kube-api-access-f7s6x" (OuterVolumeSpecName: "kube-api-access-f7s6x") pod "9d93c119-37ee-4cf6-a01e-67c40d43f9ce" (UID: "9d93c119-37ee-4cf6-a01e-67c40d43f9ce"). InnerVolumeSpecName "kube-api-access-f7s6x". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 02:08:48 crc kubenswrapper[4755]: I1124 02:08:48.227490 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-5hnsd_96e0eeaf-102b-47ad-8f60-02115894de6e/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Nov 24 02:08:48 crc kubenswrapper[4755]: I1124 02:08:48.233865 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9d93c119-37ee-4cf6-a01e-67c40d43f9ce-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9d93c119-37ee-4cf6-a01e-67c40d43f9ce" (UID: "9d93c119-37ee-4cf6-a01e-67c40d43f9ce"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 02:08:48 crc kubenswrapper[4755]: I1124 02:08:48.242341 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9d93c119-37ee-4cf6-a01e-67c40d43f9ce-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 02:08:48 crc kubenswrapper[4755]: I1124 02:08:48.242372 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f7s6x\" (UniqueName: \"kubernetes.io/projected/9d93c119-37ee-4cf6-a01e-67c40d43f9ce-kube-api-access-f7s6x\") on node \"crc\" DevicePath \"\"" Nov 24 02:08:48 crc kubenswrapper[4755]: I1124 02:08:48.393218 4755 generic.go:334] "Generic (PLEG): container finished" podID="9d93c119-37ee-4cf6-a01e-67c40d43f9ce" containerID="97eb0b5f1325f3d1b9acb35a3961b213a82b4801996fb765fdbed6682e94377e" exitCode=0 Nov 24 02:08:48 crc kubenswrapper[4755]: I1124 02:08:48.393257 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tvqw7" event={"ID":"9d93c119-37ee-4cf6-a01e-67c40d43f9ce","Type":"ContainerDied","Data":"97eb0b5f1325f3d1b9acb35a3961b213a82b4801996fb765fdbed6682e94377e"} Nov 24 02:08:48 crc kubenswrapper[4755]: I1124 02:08:48.393281 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tvqw7" event={"ID":"9d93c119-37ee-4cf6-a01e-67c40d43f9ce","Type":"ContainerDied","Data":"62efe8282071bf85e66302b67eccf2283d6d11f47b0660aba075e9261d6a0c44"} Nov 24 02:08:48 crc kubenswrapper[4755]: I1124 02:08:48.393300 4755 scope.go:117] "RemoveContainer" containerID="97eb0b5f1325f3d1b9acb35a3961b213a82b4801996fb765fdbed6682e94377e" Nov 24 02:08:48 crc kubenswrapper[4755]: I1124 02:08:48.393405 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tvqw7" Nov 24 02:08:48 crc kubenswrapper[4755]: I1124 02:08:48.415406 4755 scope.go:117] "RemoveContainer" containerID="f810bb70e86989a356dfd87803e2698da1f323c926cc3f3769fb67f0a8a2f5f8" Nov 24 02:08:48 crc kubenswrapper[4755]: I1124 02:08:48.440202 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-fnjgp_6f8286f1-2efd-487d-9feb-fe2eb1fa0112/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Nov 24 02:08:48 crc kubenswrapper[4755]: I1124 02:08:48.461038 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-bvm62_bca53d6d-913f-408e-a979-2515d6ee4c8e/ssh-known-hosts-edpm-deployment/0.log" Nov 24 02:08:48 crc kubenswrapper[4755]: I1124 02:08:48.468751 4755 scope.go:117] "RemoveContainer" containerID="2db14d08342c460b61758b98bd226d57650f55326a1dbe7929093f32eab246a6" Nov 24 02:08:48 crc kubenswrapper[4755]: I1124 02:08:48.470100 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tvqw7"] Nov 24 02:08:48 crc kubenswrapper[4755]: I1124 02:08:48.483663 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-tvqw7"] Nov 24 02:08:48 crc kubenswrapper[4755]: I1124 02:08:48.489975 4755 scope.go:117] "RemoveContainer" containerID="97eb0b5f1325f3d1b9acb35a3961b213a82b4801996fb765fdbed6682e94377e" Nov 24 02:08:48 crc kubenswrapper[4755]: E1124 02:08:48.490354 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"97eb0b5f1325f3d1b9acb35a3961b213a82b4801996fb765fdbed6682e94377e\": container with ID starting with 97eb0b5f1325f3d1b9acb35a3961b213a82b4801996fb765fdbed6682e94377e not found: ID does not exist" containerID="97eb0b5f1325f3d1b9acb35a3961b213a82b4801996fb765fdbed6682e94377e" Nov 24 02:08:48 crc kubenswrapper[4755]: I1124 02:08:48.490393 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97eb0b5f1325f3d1b9acb35a3961b213a82b4801996fb765fdbed6682e94377e"} err="failed to get container status \"97eb0b5f1325f3d1b9acb35a3961b213a82b4801996fb765fdbed6682e94377e\": rpc error: code = NotFound desc = could not find container \"97eb0b5f1325f3d1b9acb35a3961b213a82b4801996fb765fdbed6682e94377e\": container with ID starting with 97eb0b5f1325f3d1b9acb35a3961b213a82b4801996fb765fdbed6682e94377e not found: ID does not exist" Nov 24 02:08:48 crc kubenswrapper[4755]: I1124 02:08:48.490421 4755 scope.go:117] "RemoveContainer" containerID="f810bb70e86989a356dfd87803e2698da1f323c926cc3f3769fb67f0a8a2f5f8" Nov 24 02:08:48 crc kubenswrapper[4755]: E1124 02:08:48.490770 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f810bb70e86989a356dfd87803e2698da1f323c926cc3f3769fb67f0a8a2f5f8\": container with ID starting with f810bb70e86989a356dfd87803e2698da1f323c926cc3f3769fb67f0a8a2f5f8 not found: ID does not exist" containerID="f810bb70e86989a356dfd87803e2698da1f323c926cc3f3769fb67f0a8a2f5f8" Nov 24 02:08:48 crc kubenswrapper[4755]: I1124 02:08:48.490844 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f810bb70e86989a356dfd87803e2698da1f323c926cc3f3769fb67f0a8a2f5f8"} err="failed to get container status \"f810bb70e86989a356dfd87803e2698da1f323c926cc3f3769fb67f0a8a2f5f8\": rpc error: code = NotFound desc = 
could not find container \"f810bb70e86989a356dfd87803e2698da1f323c926cc3f3769fb67f0a8a2f5f8\": container with ID starting with f810bb70e86989a356dfd87803e2698da1f323c926cc3f3769fb67f0a8a2f5f8 not found: ID does not exist" Nov 24 02:08:48 crc kubenswrapper[4755]: I1124 02:08:48.490878 4755 scope.go:117] "RemoveContainer" containerID="2db14d08342c460b61758b98bd226d57650f55326a1dbe7929093f32eab246a6" Nov 24 02:08:48 crc kubenswrapper[4755]: E1124 02:08:48.491231 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2db14d08342c460b61758b98bd226d57650f55326a1dbe7929093f32eab246a6\": container with ID starting with 2db14d08342c460b61758b98bd226d57650f55326a1dbe7929093f32eab246a6 not found: ID does not exist" containerID="2db14d08342c460b61758b98bd226d57650f55326a1dbe7929093f32eab246a6" Nov 24 02:08:48 crc kubenswrapper[4755]: I1124 02:08:48.491272 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2db14d08342c460b61758b98bd226d57650f55326a1dbe7929093f32eab246a6"} err="failed to get container status \"2db14d08342c460b61758b98bd226d57650f55326a1dbe7929093f32eab246a6\": rpc error: code = NotFound desc = could not find container \"2db14d08342c460b61758b98bd226d57650f55326a1dbe7929093f32eab246a6\": container with ID starting with 2db14d08342c460b61758b98bd226d57650f55326a1dbe7929093f32eab246a6 not found: ID does not exist" Nov 24 02:08:48 crc kubenswrapper[4755]: I1124 02:08:48.718761 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-55d8c9dc95-pj9b5_5e2272ce-0bb7-4cc8-a11b-be4947646efd/proxy-server/0.log" Nov 24 02:08:48 crc kubenswrapper[4755]: I1124 02:08:48.822298 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-lpg42_a67b2b8c-3846-4f9d-a5d0-9279028f63e5/swift-ring-rebalance/0.log" Nov 24 02:08:48 crc kubenswrapper[4755]: I1124 02:08:48.876751 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-55d8c9dc95-pj9b5_5e2272ce-0bb7-4cc8-a11b-be4947646efd/proxy-httpd/0.log" Nov 24 02:08:48 crc kubenswrapper[4755]: I1124 02:08:48.965425 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c3d5f6f4-a502-4cbf-95c6-e85416bcd559/account-auditor/0.log" Nov 24 02:08:49 crc kubenswrapper[4755]: I1124 02:08:49.001706 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c3d5f6f4-a502-4cbf-95c6-e85416bcd559/account-reaper/0.log" Nov 24 02:08:49 crc kubenswrapper[4755]: I1124 02:08:49.125500 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c3d5f6f4-a502-4cbf-95c6-e85416bcd559/account-replicator/0.log" Nov 24 02:08:49 crc kubenswrapper[4755]: I1124 02:08:49.165096 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c3d5f6f4-a502-4cbf-95c6-e85416bcd559/container-auditor/0.log" Nov 24 02:08:49 crc kubenswrapper[4755]: I1124 02:08:49.190064 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c3d5f6f4-a502-4cbf-95c6-e85416bcd559/account-server/0.log" Nov 24 02:08:49 crc kubenswrapper[4755]: I1124 02:08:49.224792 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c3d5f6f4-a502-4cbf-95c6-e85416bcd559/container-replicator/0.log" Nov 24 02:08:49 crc kubenswrapper[4755]: I1124 02:08:49.315694 4755 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_c3d5f6f4-a502-4cbf-95c6-e85416bcd559/container-server/0.log" Nov 24 02:08:49 crc kubenswrapper[4755]: I1124 02:08:49.399959 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c3d5f6f4-a502-4cbf-95c6-e85416bcd559/container-updater/0.log" Nov 24 02:08:49 crc kubenswrapper[4755]: I1124 02:08:49.433769 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c3d5f6f4-a502-4cbf-95c6-e85416bcd559/object-expirer/0.log" Nov 24 02:08:49 crc kubenswrapper[4755]: I1124 02:08:49.445336 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c3d5f6f4-a502-4cbf-95c6-e85416bcd559/object-auditor/0.log" Nov 24 02:08:49 crc kubenswrapper[4755]: I1124 02:08:49.567839 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c3d5f6f4-a502-4cbf-95c6-e85416bcd559/object-replicator/0.log" Nov 24 02:08:49 crc kubenswrapper[4755]: I1124 02:08:49.647029 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c3d5f6f4-a502-4cbf-95c6-e85416bcd559/object-server/0.log" Nov 24 02:08:49 crc kubenswrapper[4755]: I1124 02:08:49.668922 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c3d5f6f4-a502-4cbf-95c6-e85416bcd559/object-updater/0.log" Nov 24 02:08:49 crc kubenswrapper[4755]: I1124 02:08:49.704360 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c3d5f6f4-a502-4cbf-95c6-e85416bcd559/rsync/0.log" Nov 24 02:08:49 crc kubenswrapper[4755]: I1124 02:08:49.787761 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c3d5f6f4-a502-4cbf-95c6-e85416bcd559/swift-recon-cron/0.log" Nov 24 02:08:49 crc kubenswrapper[4755]: I1124 02:08:49.899052 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-7grgk_c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Nov 24 02:08:50 crc kubenswrapper[4755]: I1124 02:08:50.009476 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d93c119-37ee-4cf6-a01e-67c40d43f9ce" path="/var/lib/kubelet/pods/9d93c119-37ee-4cf6-a01e-67c40d43f9ce/volumes" Nov 24 02:08:50 crc kubenswrapper[4755]: I1124 02:08:50.013781 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_23d50e60-91da-42c3-8d11-5c22eab88929/tempest-tests-tempest-tests-runner/0.log" Nov 24 02:08:50 crc kubenswrapper[4755]: I1124 02:08:50.120152 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_19e2c4ea-ac40-4007-b635-f5decba54fc3/test-operator-logs-container/0.log" Nov 24 02:08:50 crc kubenswrapper[4755]: I1124 02:08:50.192956 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-phn9q_dac685c9-1650-4372-9f79-0c359d3169eb/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Nov 24 02:08:59 crc kubenswrapper[4755]: I1124 02:08:59.330731 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_fa97a0b2-add8-4532-ab38-d726de9f0a60/memcached/0.log" Nov 24 02:09:03 crc kubenswrapper[4755]: I1124 02:09:03.295258 4755 patch_prober.go:28] interesting pod/machine-config-daemon-h8xzm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe 
status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 02:09:03 crc kubenswrapper[4755]: I1124 02:09:03.295846 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 02:09:12 crc kubenswrapper[4755]: I1124 02:09:12.553535 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7b7d855816ca9e76c7c9245ff70832f330b0e0531ae10de757037d494dxbhbm_17800bdb-a186-4da5-aa5f-49925f2c6b5a/util/0.log" Nov 24 02:09:12 crc kubenswrapper[4755]: I1124 02:09:12.703008 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7b7d855816ca9e76c7c9245ff70832f330b0e0531ae10de757037d494dxbhbm_17800bdb-a186-4da5-aa5f-49925f2c6b5a/util/0.log" Nov 24 02:09:12 crc kubenswrapper[4755]: I1124 02:09:12.742736 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7b7d855816ca9e76c7c9245ff70832f330b0e0531ae10de757037d494dxbhbm_17800bdb-a186-4da5-aa5f-49925f2c6b5a/pull/0.log" Nov 24 02:09:12 crc kubenswrapper[4755]: I1124 02:09:12.746243 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7b7d855816ca9e76c7c9245ff70832f330b0e0531ae10de757037d494dxbhbm_17800bdb-a186-4da5-aa5f-49925f2c6b5a/pull/0.log" Nov 24 02:09:12 crc kubenswrapper[4755]: I1124 02:09:12.911010 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7b7d855816ca9e76c7c9245ff70832f330b0e0531ae10de757037d494dxbhbm_17800bdb-a186-4da5-aa5f-49925f2c6b5a/util/0.log" Nov 24 02:09:12 crc kubenswrapper[4755]: I1124 02:09:12.931017 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7b7d855816ca9e76c7c9245ff70832f330b0e0531ae10de757037d494dxbhbm_17800bdb-a186-4da5-aa5f-49925f2c6b5a/extract/0.log" Nov 24 02:09:12 crc kubenswrapper[4755]: I1124 02:09:12.940044 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7b7d855816ca9e76c7c9245ff70832f330b0e0531ae10de757037d494dxbhbm_17800bdb-a186-4da5-aa5f-49925f2c6b5a/pull/0.log" Nov 24 02:09:13 crc kubenswrapper[4755]: I1124 02:09:13.082451 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-75fb479bcc-b297l_1660a0eb-228b-41bc-a360-a71fec20d415/kube-rbac-proxy/0.log" Nov 24 02:09:13 crc kubenswrapper[4755]: I1124 02:09:13.199647 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-75fb479bcc-b297l_1660a0eb-228b-41bc-a360-a71fec20d415/manager/0.log" Nov 24 02:09:13 crc kubenswrapper[4755]: I1124 02:09:13.200452 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-6498cbf48f-hh7kh_fc04cdee-f1bd-4d40-9c1c-02f4e9661851/kube-rbac-proxy/0.log" Nov 24 02:09:13 crc kubenswrapper[4755]: I1124 02:09:13.334783 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-6498cbf48f-hh7kh_fc04cdee-f1bd-4d40-9c1c-02f4e9661851/manager/0.log" Nov 24 02:09:13 crc kubenswrapper[4755]: I1124 02:09:13.390165 4755 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_designate-operator-controller-manager-767ccfd65f-fdwk5_e84b7100-14c9-436c-97e5-d14c2455b42a/kube-rbac-proxy/0.log" Nov 24 02:09:13 crc kubenswrapper[4755]: I1124 02:09:13.392761 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-767ccfd65f-fdwk5_e84b7100-14c9-436c-97e5-d14c2455b42a/manager/0.log" Nov 24 02:09:13 crc kubenswrapper[4755]: I1124 02:09:13.505517 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-7969689c84-bnmvb_a22ef49d-f887-41f4-ad37-6b1b0bf7a748/kube-rbac-proxy/0.log" Nov 24 02:09:13 crc kubenswrapper[4755]: I1124 02:09:13.651494 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-7969689c84-bnmvb_a22ef49d-f887-41f4-ad37-6b1b0bf7a748/manager/0.log" Nov 24 02:09:13 crc kubenswrapper[4755]: I1124 02:09:13.704311 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-56f54d6746-45h29_22780566-edb3-47e3-b3ea-a42def0f4460/kube-rbac-proxy/0.log" Nov 24 02:09:13 crc kubenswrapper[4755]: I1124 02:09:13.737620 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-56f54d6746-45h29_22780566-edb3-47e3-b3ea-a42def0f4460/manager/0.log" Nov 24 02:09:13 crc kubenswrapper[4755]: I1124 02:09:13.813011 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-598f69df5d-97znt_77153df1-136d-456e-a6e0-817b2f633d3e/kube-rbac-proxy/0.log" Nov 24 02:09:13 crc kubenswrapper[4755]: I1124 02:09:13.904580 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-598f69df5d-97znt_77153df1-136d-456e-a6e0-817b2f633d3e/manager/0.log" Nov 24 02:09:13 crc kubenswrapper[4755]: I1124 02:09:13.991572 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-6dd8864d7c-zsbst_fd91f6d6-1cc9-4350-a22a-b3859073f6e0/kube-rbac-proxy/0.log" Nov 24 02:09:14 crc kubenswrapper[4755]: I1124 02:09:14.192478 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-99b499f4-p9wgw_f443bd2d-3e36-44eb-9684-8ec505b8bea7/kube-rbac-proxy/0.log" Nov 24 02:09:14 crc kubenswrapper[4755]: I1124 02:09:14.192842 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-6dd8864d7c-zsbst_fd91f6d6-1cc9-4350-a22a-b3859073f6e0/manager/0.log" Nov 24 02:09:14 crc kubenswrapper[4755]: I1124 02:09:14.213845 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-99b499f4-p9wgw_f443bd2d-3e36-44eb-9684-8ec505b8bea7/manager/0.log" Nov 24 02:09:14 crc kubenswrapper[4755]: I1124 02:09:14.369844 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7454b96578-5622b_dd14c3fa-bb96-4795-b339-a506c71b16a2/kube-rbac-proxy/0.log" Nov 24 02:09:14 crc kubenswrapper[4755]: I1124 02:09:14.471365 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7454b96578-5622b_dd14c3fa-bb96-4795-b339-a506c71b16a2/manager/0.log" Nov 24 02:09:14 crc kubenswrapper[4755]: I1124 02:09:14.555839 4755 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-58f887965d-2rnh9_33d6bfe7-943b-4a59-bfdd-e240b869163d/manager/0.log" Nov 24 02:09:14 crc kubenswrapper[4755]: I1124 02:09:14.558015 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-58f887965d-2rnh9_33d6bfe7-943b-4a59-bfdd-e240b869163d/kube-rbac-proxy/0.log" Nov 24 02:09:14 crc kubenswrapper[4755]: I1124 02:09:14.697776 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-54b5986bb8-gqlfb_1a7c3ac2-1c0f-474e-837c-b80226975978/kube-rbac-proxy/0.log" Nov 24 02:09:14 crc kubenswrapper[4755]: I1124 02:09:14.790370 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-54b5986bb8-gqlfb_1a7c3ac2-1c0f-474e-837c-b80226975978/manager/0.log" Nov 24 02:09:14 crc kubenswrapper[4755]: I1124 02:09:14.887081 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-78bd47f458-pcp8q_9e78d1d5-6ae4-4fc3-9edf-77e9f331bf19/kube-rbac-proxy/0.log" Nov 24 02:09:14 crc kubenswrapper[4755]: I1124 02:09:14.930792 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-78bd47f458-pcp8q_9e78d1d5-6ae4-4fc3-9edf-77e9f331bf19/manager/0.log" Nov 24 02:09:14 crc kubenswrapper[4755]: I1124 02:09:14.958958 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-cfbb9c588-prcbz_9b49aa48-d2db-40a6-9f6f-6b8e5bd3cd07/kube-rbac-proxy/0.log" Nov 24 02:09:15 crc kubenswrapper[4755]: I1124 02:09:15.133014 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-54cfbf4c7d-7t6ft_3c770fe2-ea89-4ba8-b4f0-95a4f310ea65/kube-rbac-proxy/0.log" Nov 24 02:09:15 crc kubenswrapper[4755]: I1124 02:09:15.179436 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-54cfbf4c7d-7t6ft_3c770fe2-ea89-4ba8-b4f0-95a4f310ea65/manager/0.log" Nov 24 02:09:15 crc kubenswrapper[4755]: I1124 02:09:15.182286 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-cfbb9c588-prcbz_9b49aa48-d2db-40a6-9f6f-6b8e5bd3cd07/manager/0.log" Nov 24 02:09:15 crc kubenswrapper[4755]: I1124 02:09:15.290647 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-8c7444f48-rbmhv_f013c70d-8c89-40f5-a132-393403d297c2/kube-rbac-proxy/0.log" Nov 24 02:09:15 crc kubenswrapper[4755]: I1124 02:09:15.383017 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-8c7444f48-rbmhv_f013c70d-8c89-40f5-a132-393403d297c2/manager/0.log" Nov 24 02:09:15 crc kubenswrapper[4755]: I1124 02:09:15.494768 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-68786bb554-2pljv_8695f181-2de4-4fa8-b952-8208ab710b94/kube-rbac-proxy/0.log" Nov 24 02:09:15 crc kubenswrapper[4755]: I1124 02:09:15.656369 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-95bdd885d-rpv4q_3730c324-139f-4560-ac12-c8e0595a58cb/kube-rbac-proxy/0.log" Nov 24 02:09:15 crc 
kubenswrapper[4755]: I1124 02:09:15.890085 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-95bdd885d-rpv4q_3730c324-139f-4560-ac12-c8e0595a58cb/operator/0.log" Nov 24 02:09:15 crc kubenswrapper[4755]: I1124 02:09:15.918309 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-7f5r2_ccfae768-f324-4db5-ac90-8fd333deca44/registry-server/0.log" Nov 24 02:09:16 crc kubenswrapper[4755]: I1124 02:09:16.178931 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-54fc5f65b7-b94wx_73185acc-71f3-452e-8454-ebad97b6c6ad/kube-rbac-proxy/0.log" Nov 24 02:09:16 crc kubenswrapper[4755]: I1124 02:09:16.180339 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-54fc5f65b7-b94wx_73185acc-71f3-452e-8454-ebad97b6c6ad/manager/0.log" Nov 24 02:09:16 crc kubenswrapper[4755]: I1124 02:09:16.321688 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-5b797b8dff-s22mt_eb1590a5-3843-4540-ac41-bdfe49ae6569/kube-rbac-proxy/0.log" Nov 24 02:09:16 crc kubenswrapper[4755]: I1124 02:09:16.403364 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-5b797b8dff-s22mt_eb1590a5-3843-4540-ac41-bdfe49ae6569/manager/0.log" Nov 24 02:09:16 crc kubenswrapper[4755]: I1124 02:09:16.435176 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-5f97d8c699-g2b7d_2107694a-19fc-40cd-9ef2-b8b60b8b88e2/operator/0.log" Nov 24 02:09:16 crc kubenswrapper[4755]: I1124 02:09:16.564678 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-68786bb554-2pljv_8695f181-2de4-4fa8-b952-8208ab710b94/manager/0.log" Nov 24 02:09:16 crc kubenswrapper[4755]: I1124 02:09:16.588731 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-d656998f4-kbj7b_62e235a5-7928-4e26-9948-a3d2a829ef23/kube-rbac-proxy/0.log" Nov 24 02:09:16 crc kubenswrapper[4755]: I1124 02:09:16.651988 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-d656998f4-kbj7b_62e235a5-7928-4e26-9948-a3d2a829ef23/manager/0.log" Nov 24 02:09:16 crc kubenswrapper[4755]: I1124 02:09:16.717709 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-6d4bf84b58-7pmzf_25b69b88-4612-4183-a978-b9dd58502d37/kube-rbac-proxy/0.log" Nov 24 02:09:16 crc kubenswrapper[4755]: I1124 02:09:16.845667 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-6d4bf84b58-7pmzf_25b69b88-4612-4183-a978-b9dd58502d37/manager/0.log" Nov 24 02:09:16 crc kubenswrapper[4755]: I1124 02:09:16.898449 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-b4c496f69-n9dqh_8914c196-79e5-456c-9a42-1f4464f8dbf8/kube-rbac-proxy/0.log" Nov 24 02:09:16 crc kubenswrapper[4755]: I1124 02:09:16.934018 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-b4c496f69-n9dqh_8914c196-79e5-456c-9a42-1f4464f8dbf8/manager/0.log" Nov 24 02:09:17 crc 
kubenswrapper[4755]: I1124 02:09:17.002526 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-8c6448b9f-j56hj_03ab2bfa-29d5-408b-8d69-54b8b367be23/kube-rbac-proxy/0.log" Nov 24 02:09:17 crc kubenswrapper[4755]: I1124 02:09:17.020779 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-8c6448b9f-j56hj_03ab2bfa-29d5-408b-8d69-54b8b367be23/manager/0.log" Nov 24 02:09:31 crc kubenswrapper[4755]: I1124 02:09:31.692244 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-6qvk7_fa143983-92e8-480e-9bb3-928892077000/control-plane-machine-set-operator/0.log" Nov 24 02:09:31 crc kubenswrapper[4755]: I1124 02:09:31.861922 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-9f7kc_fceb51f9-deec-4840-86d0-a67228819bef/kube-rbac-proxy/0.log" Nov 24 02:09:31 crc kubenswrapper[4755]: I1124 02:09:31.884609 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-9f7kc_fceb51f9-deec-4840-86d0-a67228819bef/machine-api-operator/0.log" Nov 24 02:09:33 crc kubenswrapper[4755]: I1124 02:09:33.295056 4755 patch_prober.go:28] interesting pod/machine-config-daemon-h8xzm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 02:09:33 crc kubenswrapper[4755]: I1124 02:09:33.295633 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 02:09:44 crc kubenswrapper[4755]: I1124 02:09:44.523537 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-vwz4w_4b370b30-6433-4155-be26-46a905bb6b3d/cert-manager-controller/0.log" Nov 24 02:09:44 crc kubenswrapper[4755]: I1124 02:09:44.661561 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-kvcl8_844c92d0-68cd-410c-ba95-f440eb5bfcfc/cert-manager-cainjector/0.log" Nov 24 02:09:44 crc kubenswrapper[4755]: I1124 02:09:44.729015 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-t6d9j_6c7e5cd9-3286-4a20-849c-92cc8c910e10/cert-manager-webhook/0.log" Nov 24 02:09:56 crc kubenswrapper[4755]: I1124 02:09:56.764748 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-5874bd7bc5-bjf4f_b0fbafa4-291f-4eee-8133-30e7a85ff7ff/nmstate-console-plugin/0.log" Nov 24 02:09:56 crc kubenswrapper[4755]: I1124 02:09:56.931410 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-7hlnk_3e04477a-f03b-4cd7-ba29-1622ea087da5/nmstate-handler/0.log" Nov 24 02:09:56 crc kubenswrapper[4755]: I1124 02:09:56.993262 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-5dcf9c57c5-db5zr_e48ce9d8-52d8-4ab4-9f9b-b4ea43d68277/kube-rbac-proxy/0.log" Nov 24 02:09:57 crc kubenswrapper[4755]: I1124 02:09:57.016215 4755 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-5dcf9c57c5-db5zr_e48ce9d8-52d8-4ab4-9f9b-b4ea43d68277/nmstate-metrics/0.log" Nov 24 02:09:57 crc kubenswrapper[4755]: I1124 02:09:57.124707 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-557fdffb88-qmm9f_0594f79b-cc74-4be7-a0c0-605666ea9f19/nmstate-operator/0.log" Nov 24 02:09:57 crc kubenswrapper[4755]: I1124 02:09:57.201931 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6b89b748d8-6dn5d_d2fd6ef2-fcd0-4169-a26a-4f30b0619efa/nmstate-webhook/0.log" Nov 24 02:10:03 crc kubenswrapper[4755]: I1124 02:10:03.294998 4755 patch_prober.go:28] interesting pod/machine-config-daemon-h8xzm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 02:10:03 crc kubenswrapper[4755]: I1124 02:10:03.295535 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 02:10:03 crc kubenswrapper[4755]: I1124 02:10:03.295590 4755 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" Nov 24 02:10:03 crc kubenswrapper[4755]: I1124 02:10:03.296364 4755 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8d58e541d2b2ce30c0dc08c9a1783324e6dc27150018b18022e3de27e7efa390"} pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 24 02:10:03 crc kubenswrapper[4755]: I1124 02:10:03.296419 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" containerID="cri-o://8d58e541d2b2ce30c0dc08c9a1783324e6dc27150018b18022e3de27e7efa390" gracePeriod=600 Nov 24 02:10:04 crc kubenswrapper[4755]: I1124 02:10:04.062740 4755 generic.go:334] "Generic (PLEG): container finished" podID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerID="8d58e541d2b2ce30c0dc08c9a1783324e6dc27150018b18022e3de27e7efa390" exitCode=0 Nov 24 02:10:04 crc kubenswrapper[4755]: I1124 02:10:04.062839 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" event={"ID":"b1962128-02a0-46c3-82c2-5055c2aed0b9","Type":"ContainerDied","Data":"8d58e541d2b2ce30c0dc08c9a1783324e6dc27150018b18022e3de27e7efa390"} Nov 24 02:10:04 crc kubenswrapper[4755]: I1124 02:10:04.063069 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" event={"ID":"b1962128-02a0-46c3-82c2-5055c2aed0b9","Type":"ContainerStarted","Data":"fff609c366ec815c32e40db1bbafb7f7be4ae091a2824075c9d486c88017b349"} Nov 24 02:10:04 crc kubenswrapper[4755]: I1124 02:10:04.063096 4755 scope.go:117] "RemoveContainer" containerID="26c00dd7d27738208405e6e7edcb38f31b055da14fe6cc90de3d8ac137ba77f7" Nov 24 02:10:10 crc 
kubenswrapper[4755]: I1124 02:10:10.620513 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6c7b4b5f48-lz2vt_cf1d6671-b748-4db1-89b9-9ae4968f8297/kube-rbac-proxy/0.log" Nov 24 02:10:10 crc kubenswrapper[4755]: I1124 02:10:10.782729 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6c7b4b5f48-lz2vt_cf1d6671-b748-4db1-89b9-9ae4968f8297/controller/0.log" Nov 24 02:10:10 crc kubenswrapper[4755]: I1124 02:10:10.838228 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8wz9f_5218d7a8-6776-4f68-afa1-fc48e1d058f5/cp-frr-files/0.log" Nov 24 02:10:11 crc kubenswrapper[4755]: I1124 02:10:11.017071 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8wz9f_5218d7a8-6776-4f68-afa1-fc48e1d058f5/cp-metrics/0.log" Nov 24 02:10:11 crc kubenswrapper[4755]: I1124 02:10:11.019067 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8wz9f_5218d7a8-6776-4f68-afa1-fc48e1d058f5/cp-reloader/0.log" Nov 24 02:10:11 crc kubenswrapper[4755]: I1124 02:10:11.021301 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8wz9f_5218d7a8-6776-4f68-afa1-fc48e1d058f5/cp-frr-files/0.log" Nov 24 02:10:11 crc kubenswrapper[4755]: I1124 02:10:11.100625 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8wz9f_5218d7a8-6776-4f68-afa1-fc48e1d058f5/cp-reloader/0.log" Nov 24 02:10:11 crc kubenswrapper[4755]: I1124 02:10:11.236432 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8wz9f_5218d7a8-6776-4f68-afa1-fc48e1d058f5/cp-frr-files/0.log" Nov 24 02:10:11 crc kubenswrapper[4755]: I1124 02:10:11.264211 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8wz9f_5218d7a8-6776-4f68-afa1-fc48e1d058f5/cp-reloader/0.log" Nov 24 02:10:11 crc kubenswrapper[4755]: I1124 02:10:11.265303 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8wz9f_5218d7a8-6776-4f68-afa1-fc48e1d058f5/cp-metrics/0.log" Nov 24 02:10:11 crc kubenswrapper[4755]: I1124 02:10:11.292430 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8wz9f_5218d7a8-6776-4f68-afa1-fc48e1d058f5/cp-metrics/0.log" Nov 24 02:10:11 crc kubenswrapper[4755]: I1124 02:10:11.452980 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8wz9f_5218d7a8-6776-4f68-afa1-fc48e1d058f5/cp-reloader/0.log" Nov 24 02:10:11 crc kubenswrapper[4755]: I1124 02:10:11.461034 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8wz9f_5218d7a8-6776-4f68-afa1-fc48e1d058f5/cp-metrics/0.log" Nov 24 02:10:11 crc kubenswrapper[4755]: I1124 02:10:11.467896 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8wz9f_5218d7a8-6776-4f68-afa1-fc48e1d058f5/cp-frr-files/0.log" Nov 24 02:10:11 crc kubenswrapper[4755]: I1124 02:10:11.498472 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8wz9f_5218d7a8-6776-4f68-afa1-fc48e1d058f5/controller/0.log" Nov 24 02:10:11 crc kubenswrapper[4755]: I1124 02:10:11.628109 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8wz9f_5218d7a8-6776-4f68-afa1-fc48e1d058f5/frr-metrics/0.log" Nov 24 02:10:11 crc kubenswrapper[4755]: I1124 02:10:11.674829 4755 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-8wz9f_5218d7a8-6776-4f68-afa1-fc48e1d058f5/kube-rbac-proxy/0.log" Nov 24 02:10:11 crc kubenswrapper[4755]: I1124 02:10:11.698824 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8wz9f_5218d7a8-6776-4f68-afa1-fc48e1d058f5/kube-rbac-proxy-frr/0.log" Nov 24 02:10:11 crc kubenswrapper[4755]: I1124 02:10:11.825633 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8wz9f_5218d7a8-6776-4f68-afa1-fc48e1d058f5/reloader/0.log" Nov 24 02:10:11 crc kubenswrapper[4755]: I1124 02:10:11.943972 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-6998585d5-fd8hk_326eacf2-4f20-4577-b64c-5e5a55b8667a/frr-k8s-webhook-server/0.log" Nov 24 02:10:12 crc kubenswrapper[4755]: I1124 02:10:12.149454 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-55d87b5596-gzttc_546c69cc-7307-405e-b5db-22ab6f25b47d/manager/0.log" Nov 24 02:10:12 crc kubenswrapper[4755]: I1124 02:10:12.317163 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-7d8857ff56-s2ljm_19c770cd-3557-4cc8-a06d-0597e9766be2/webhook-server/0.log" Nov 24 02:10:12 crc kubenswrapper[4755]: I1124 02:10:12.493242 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-4qvvw_06358ec3-9d2a-433d-8de9-5044c2e189a4/kube-rbac-proxy/0.log" Nov 24 02:10:13 crc kubenswrapper[4755]: I1124 02:10:13.087513 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-4qvvw_06358ec3-9d2a-433d-8de9-5044c2e189a4/speaker/0.log" Nov 24 02:10:13 crc kubenswrapper[4755]: I1124 02:10:13.193636 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8wz9f_5218d7a8-6776-4f68-afa1-fc48e1d058f5/frr/0.log" Nov 24 02:10:24 crc kubenswrapper[4755]: I1124 02:10:24.341248 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772er2r7z_9fa781fe-e51d-4912-b210-e873945bcbf8/util/0.log" Nov 24 02:10:24 crc kubenswrapper[4755]: I1124 02:10:24.607967 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772er2r7z_9fa781fe-e51d-4912-b210-e873945bcbf8/pull/0.log" Nov 24 02:10:24 crc kubenswrapper[4755]: I1124 02:10:24.615488 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772er2r7z_9fa781fe-e51d-4912-b210-e873945bcbf8/util/0.log" Nov 24 02:10:24 crc kubenswrapper[4755]: I1124 02:10:24.654987 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772er2r7z_9fa781fe-e51d-4912-b210-e873945bcbf8/pull/0.log" Nov 24 02:10:24 crc kubenswrapper[4755]: I1124 02:10:24.750673 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772er2r7z_9fa781fe-e51d-4912-b210-e873945bcbf8/util/0.log" Nov 24 02:10:24 crc kubenswrapper[4755]: I1124 02:10:24.796852 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772er2r7z_9fa781fe-e51d-4912-b210-e873945bcbf8/pull/0.log" Nov 24 02:10:24 crc kubenswrapper[4755]: I1124 02:10:24.818077 4755 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772er2r7z_9fa781fe-e51d-4912-b210-e873945bcbf8/extract/0.log" Nov 24 02:10:24 crc kubenswrapper[4755]: I1124 02:10:24.918195 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8mk5m_fb5622d8-2858-48d9-94e9-5a4ea557c6ae/extract-utilities/0.log" Nov 24 02:10:25 crc kubenswrapper[4755]: I1124 02:10:25.069949 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8mk5m_fb5622d8-2858-48d9-94e9-5a4ea557c6ae/extract-utilities/0.log" Nov 24 02:10:25 crc kubenswrapper[4755]: I1124 02:10:25.076739 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8mk5m_fb5622d8-2858-48d9-94e9-5a4ea557c6ae/extract-content/0.log" Nov 24 02:10:25 crc kubenswrapper[4755]: I1124 02:10:25.121077 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8mk5m_fb5622d8-2858-48d9-94e9-5a4ea557c6ae/extract-content/0.log" Nov 24 02:10:25 crc kubenswrapper[4755]: I1124 02:10:25.286054 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8mk5m_fb5622d8-2858-48d9-94e9-5a4ea557c6ae/extract-content/0.log" Nov 24 02:10:25 crc kubenswrapper[4755]: I1124 02:10:25.297018 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8mk5m_fb5622d8-2858-48d9-94e9-5a4ea557c6ae/extract-utilities/0.log" Nov 24 02:10:25 crc kubenswrapper[4755]: I1124 02:10:25.531048 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-4ms9l_802e4447-64d9-4370-954b-7212c1ef7a9d/extract-utilities/0.log" Nov 24 02:10:25 crc kubenswrapper[4755]: I1124 02:10:25.642430 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8mk5m_fb5622d8-2858-48d9-94e9-5a4ea557c6ae/registry-server/0.log" Nov 24 02:10:25 crc kubenswrapper[4755]: I1124 02:10:25.699223 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-4ms9l_802e4447-64d9-4370-954b-7212c1ef7a9d/extract-utilities/0.log" Nov 24 02:10:25 crc kubenswrapper[4755]: I1124 02:10:25.750902 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-4ms9l_802e4447-64d9-4370-954b-7212c1ef7a9d/extract-content/0.log" Nov 24 02:10:25 crc kubenswrapper[4755]: I1124 02:10:25.767141 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-4ms9l_802e4447-64d9-4370-954b-7212c1ef7a9d/extract-content/0.log" Nov 24 02:10:25 crc kubenswrapper[4755]: I1124 02:10:25.866396 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-4ms9l_802e4447-64d9-4370-954b-7212c1ef7a9d/extract-content/0.log" Nov 24 02:10:25 crc kubenswrapper[4755]: I1124 02:10:25.867317 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-4ms9l_802e4447-64d9-4370-954b-7212c1ef7a9d/extract-utilities/0.log" Nov 24 02:10:26 crc kubenswrapper[4755]: I1124 02:10:26.108423 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6ttqpp_1a72defe-0081-4267-ab64-1c844503e5cc/util/0.log" Nov 24 02:10:26 crc 
kubenswrapper[4755]: I1124 02:10:26.275785 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6ttqpp_1a72defe-0081-4267-ab64-1c844503e5cc/util/0.log" Nov 24 02:10:26 crc kubenswrapper[4755]: I1124 02:10:26.325390 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6ttqpp_1a72defe-0081-4267-ab64-1c844503e5cc/pull/0.log" Nov 24 02:10:26 crc kubenswrapper[4755]: I1124 02:10:26.325908 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6ttqpp_1a72defe-0081-4267-ab64-1c844503e5cc/pull/0.log" Nov 24 02:10:26 crc kubenswrapper[4755]: I1124 02:10:26.396679 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-4ms9l_802e4447-64d9-4370-954b-7212c1ef7a9d/registry-server/0.log" Nov 24 02:10:26 crc kubenswrapper[4755]: I1124 02:10:26.527006 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6ttqpp_1a72defe-0081-4267-ab64-1c844503e5cc/pull/0.log" Nov 24 02:10:26 crc kubenswrapper[4755]: I1124 02:10:26.529388 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6ttqpp_1a72defe-0081-4267-ab64-1c844503e5cc/extract/0.log" Nov 24 02:10:26 crc kubenswrapper[4755]: I1124 02:10:26.534022 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6ttqpp_1a72defe-0081-4267-ab64-1c844503e5cc/util/0.log" Nov 24 02:10:26 crc kubenswrapper[4755]: I1124 02:10:26.693897 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-bs6fj_be6e8d7e-1c19-449b-a7f5-c104a92edf7c/marketplace-operator/0.log" Nov 24 02:10:26 crc kubenswrapper[4755]: I1124 02:10:26.730697 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-vjn27_098cf4e9-8a23-42d7-ae62-497aa11abcca/extract-utilities/0.log" Nov 24 02:10:26 crc kubenswrapper[4755]: I1124 02:10:26.916228 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-vjn27_098cf4e9-8a23-42d7-ae62-497aa11abcca/extract-utilities/0.log" Nov 24 02:10:26 crc kubenswrapper[4755]: I1124 02:10:26.946016 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-vjn27_098cf4e9-8a23-42d7-ae62-497aa11abcca/extract-content/0.log" Nov 24 02:10:26 crc kubenswrapper[4755]: I1124 02:10:26.975285 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-vjn27_098cf4e9-8a23-42d7-ae62-497aa11abcca/extract-content/0.log" Nov 24 02:10:27 crc kubenswrapper[4755]: I1124 02:10:27.130900 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-vjn27_098cf4e9-8a23-42d7-ae62-497aa11abcca/extract-utilities/0.log" Nov 24 02:10:27 crc kubenswrapper[4755]: I1124 02:10:27.138007 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-vjn27_098cf4e9-8a23-42d7-ae62-497aa11abcca/extract-content/0.log" Nov 24 02:10:27 crc kubenswrapper[4755]: I1124 02:10:27.234208 4755 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-vjn27_098cf4e9-8a23-42d7-ae62-497aa11abcca/registry-server/0.log" Nov 24 02:10:27 crc kubenswrapper[4755]: I1124 02:10:27.327141 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-cgvb9_49c6f838-5dc3-4129-ad76-15b58019b9cc/extract-utilities/0.log" Nov 24 02:10:27 crc kubenswrapper[4755]: I1124 02:10:27.486523 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-cgvb9_49c6f838-5dc3-4129-ad76-15b58019b9cc/extract-content/0.log" Nov 24 02:10:27 crc kubenswrapper[4755]: I1124 02:10:27.522891 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-cgvb9_49c6f838-5dc3-4129-ad76-15b58019b9cc/extract-utilities/0.log" Nov 24 02:10:27 crc kubenswrapper[4755]: I1124 02:10:27.545077 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-cgvb9_49c6f838-5dc3-4129-ad76-15b58019b9cc/extract-content/0.log" Nov 24 02:10:27 crc kubenswrapper[4755]: I1124 02:10:27.685999 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-cgvb9_49c6f838-5dc3-4129-ad76-15b58019b9cc/extract-utilities/0.log" Nov 24 02:10:27 crc kubenswrapper[4755]: I1124 02:10:27.696944 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-cgvb9_49c6f838-5dc3-4129-ad76-15b58019b9cc/extract-content/0.log" Nov 24 02:10:28 crc kubenswrapper[4755]: I1124 02:10:28.128527 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-cgvb9_49c6f838-5dc3-4129-ad76-15b58019b9cc/registry-server/0.log" Nov 24 02:10:54 crc kubenswrapper[4755]: I1124 02:10:54.854949 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-zxjbj"] Nov 24 02:10:54 crc kubenswrapper[4755]: E1124 02:10:54.855998 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d93c119-37ee-4cf6-a01e-67c40d43f9ce" containerName="registry-server" Nov 24 02:10:54 crc kubenswrapper[4755]: I1124 02:10:54.856015 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d93c119-37ee-4cf6-a01e-67c40d43f9ce" containerName="registry-server" Nov 24 02:10:54 crc kubenswrapper[4755]: E1124 02:10:54.856066 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d93c119-37ee-4cf6-a01e-67c40d43f9ce" containerName="extract-utilities" Nov 24 02:10:54 crc kubenswrapper[4755]: I1124 02:10:54.856075 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d93c119-37ee-4cf6-a01e-67c40d43f9ce" containerName="extract-utilities" Nov 24 02:10:54 crc kubenswrapper[4755]: E1124 02:10:54.856086 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d93c119-37ee-4cf6-a01e-67c40d43f9ce" containerName="extract-content" Nov 24 02:10:54 crc kubenswrapper[4755]: I1124 02:10:54.856093 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d93c119-37ee-4cf6-a01e-67c40d43f9ce" containerName="extract-content" Nov 24 02:10:54 crc kubenswrapper[4755]: I1124 02:10:54.856317 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d93c119-37ee-4cf6-a01e-67c40d43f9ce" containerName="registry-server" Nov 24 02:10:54 crc kubenswrapper[4755]: I1124 02:10:54.857974 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-zxjbj" Nov 24 02:10:54 crc kubenswrapper[4755]: I1124 02:10:54.860081 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zxjbj"] Nov 24 02:10:54 crc kubenswrapper[4755]: I1124 02:10:54.998339 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/debc1ed6-04be-4f99-876f-1ffc72b01509-utilities\") pod \"certified-operators-zxjbj\" (UID: \"debc1ed6-04be-4f99-876f-1ffc72b01509\") " pod="openshift-marketplace/certified-operators-zxjbj" Nov 24 02:10:54 crc kubenswrapper[4755]: I1124 02:10:54.998788 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lb8cb\" (UniqueName: \"kubernetes.io/projected/debc1ed6-04be-4f99-876f-1ffc72b01509-kube-api-access-lb8cb\") pod \"certified-operators-zxjbj\" (UID: \"debc1ed6-04be-4f99-876f-1ffc72b01509\") " pod="openshift-marketplace/certified-operators-zxjbj" Nov 24 02:10:54 crc kubenswrapper[4755]: I1124 02:10:54.998857 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/debc1ed6-04be-4f99-876f-1ffc72b01509-catalog-content\") pod \"certified-operators-zxjbj\" (UID: \"debc1ed6-04be-4f99-876f-1ffc72b01509\") " pod="openshift-marketplace/certified-operators-zxjbj" Nov 24 02:10:55 crc kubenswrapper[4755]: I1124 02:10:55.101147 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lb8cb\" (UniqueName: \"kubernetes.io/projected/debc1ed6-04be-4f99-876f-1ffc72b01509-kube-api-access-lb8cb\") pod \"certified-operators-zxjbj\" (UID: \"debc1ed6-04be-4f99-876f-1ffc72b01509\") " pod="openshift-marketplace/certified-operators-zxjbj" Nov 24 02:10:55 crc kubenswrapper[4755]: I1124 02:10:55.101227 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/debc1ed6-04be-4f99-876f-1ffc72b01509-catalog-content\") pod \"certified-operators-zxjbj\" (UID: \"debc1ed6-04be-4f99-876f-1ffc72b01509\") " pod="openshift-marketplace/certified-operators-zxjbj" Nov 24 02:10:55 crc kubenswrapper[4755]: I1124 02:10:55.101340 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/debc1ed6-04be-4f99-876f-1ffc72b01509-utilities\") pod \"certified-operators-zxjbj\" (UID: \"debc1ed6-04be-4f99-876f-1ffc72b01509\") " pod="openshift-marketplace/certified-operators-zxjbj" Nov 24 02:10:55 crc kubenswrapper[4755]: I1124 02:10:55.101879 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/debc1ed6-04be-4f99-876f-1ffc72b01509-utilities\") pod \"certified-operators-zxjbj\" (UID: \"debc1ed6-04be-4f99-876f-1ffc72b01509\") " pod="openshift-marketplace/certified-operators-zxjbj" Nov 24 02:10:55 crc kubenswrapper[4755]: I1124 02:10:55.101982 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/debc1ed6-04be-4f99-876f-1ffc72b01509-catalog-content\") pod \"certified-operators-zxjbj\" (UID: \"debc1ed6-04be-4f99-876f-1ffc72b01509\") " pod="openshift-marketplace/certified-operators-zxjbj" Nov 24 02:10:55 crc kubenswrapper[4755]: I1124 02:10:55.123256 4755 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-lb8cb\" (UniqueName: \"kubernetes.io/projected/debc1ed6-04be-4f99-876f-1ffc72b01509-kube-api-access-lb8cb\") pod \"certified-operators-zxjbj\" (UID: \"debc1ed6-04be-4f99-876f-1ffc72b01509\") " pod="openshift-marketplace/certified-operators-zxjbj" Nov 24 02:10:55 crc kubenswrapper[4755]: I1124 02:10:55.202021 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zxjbj" Nov 24 02:10:56 crc kubenswrapper[4755]: I1124 02:10:56.054362 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zxjbj"] Nov 24 02:10:56 crc kubenswrapper[4755]: I1124 02:10:56.560965 4755 generic.go:334] "Generic (PLEG): container finished" podID="debc1ed6-04be-4f99-876f-1ffc72b01509" containerID="a82f44597cd3f2ffa014318300d217dd62beaaba6828adbe147bb657b287fa30" exitCode=0 Nov 24 02:10:56 crc kubenswrapper[4755]: I1124 02:10:56.561368 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zxjbj" event={"ID":"debc1ed6-04be-4f99-876f-1ffc72b01509","Type":"ContainerDied","Data":"a82f44597cd3f2ffa014318300d217dd62beaaba6828adbe147bb657b287fa30"} Nov 24 02:10:56 crc kubenswrapper[4755]: I1124 02:10:56.561402 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zxjbj" event={"ID":"debc1ed6-04be-4f99-876f-1ffc72b01509","Type":"ContainerStarted","Data":"6c6fff2eac5f234263419ad407d7a3813196689704482f32659c557a827997fa"} Nov 24 02:10:56 crc kubenswrapper[4755]: I1124 02:10:56.565351 4755 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 24 02:10:58 crc kubenswrapper[4755]: I1124 02:10:58.589781 4755 generic.go:334] "Generic (PLEG): container finished" podID="debc1ed6-04be-4f99-876f-1ffc72b01509" containerID="ae7e7be77b335eb1600debbb3062a1351c5d56726f4188029b5d4cf0e96d594a" exitCode=0 Nov 24 02:10:58 crc kubenswrapper[4755]: I1124 02:10:58.589881 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zxjbj" event={"ID":"debc1ed6-04be-4f99-876f-1ffc72b01509","Type":"ContainerDied","Data":"ae7e7be77b335eb1600debbb3062a1351c5d56726f4188029b5d4cf0e96d594a"} Nov 24 02:10:59 crc kubenswrapper[4755]: I1124 02:10:59.601782 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zxjbj" event={"ID":"debc1ed6-04be-4f99-876f-1ffc72b01509","Type":"ContainerStarted","Data":"294521d0fdf771b60fd27823630a65c0a314784ccd14a2f996337741671270c9"} Nov 24 02:10:59 crc kubenswrapper[4755]: I1124 02:10:59.622984 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-zxjbj" podStartSLOduration=3.200244515 podStartE2EDuration="5.622963719s" podCreationTimestamp="2025-11-24 02:10:54 +0000 UTC" firstStartedPulling="2025-11-24 02:10:56.565108944 +0000 UTC m=+3481.251174445" lastFinishedPulling="2025-11-24 02:10:58.987828148 +0000 UTC m=+3483.673893649" observedRunningTime="2025-11-24 02:10:59.617860255 +0000 UTC m=+3484.303925756" watchObservedRunningTime="2025-11-24 02:10:59.622963719 +0000 UTC m=+3484.309029220" Nov 24 02:11:05 crc kubenswrapper[4755]: I1124 02:11:05.202734 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-zxjbj" Nov 24 02:11:05 crc kubenswrapper[4755]: I1124 02:11:05.204804 4755 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-zxjbj" Nov 24 02:11:05 crc kubenswrapper[4755]: I1124 02:11:05.254060 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-zxjbj" Nov 24 02:11:05 crc kubenswrapper[4755]: I1124 02:11:05.770515 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-zxjbj" Nov 24 02:11:05 crc kubenswrapper[4755]: I1124 02:11:05.823673 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zxjbj"] Nov 24 02:11:07 crc kubenswrapper[4755]: I1124 02:11:07.706902 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-zxjbj" podUID="debc1ed6-04be-4f99-876f-1ffc72b01509" containerName="registry-server" containerID="cri-o://294521d0fdf771b60fd27823630a65c0a314784ccd14a2f996337741671270c9" gracePeriod=2 Nov 24 02:11:08 crc kubenswrapper[4755]: I1124 02:11:08.224618 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zxjbj" Nov 24 02:11:08 crc kubenswrapper[4755]: I1124 02:11:08.390349 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lb8cb\" (UniqueName: \"kubernetes.io/projected/debc1ed6-04be-4f99-876f-1ffc72b01509-kube-api-access-lb8cb\") pod \"debc1ed6-04be-4f99-876f-1ffc72b01509\" (UID: \"debc1ed6-04be-4f99-876f-1ffc72b01509\") " Nov 24 02:11:08 crc kubenswrapper[4755]: I1124 02:11:08.390505 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/debc1ed6-04be-4f99-876f-1ffc72b01509-utilities\") pod \"debc1ed6-04be-4f99-876f-1ffc72b01509\" (UID: \"debc1ed6-04be-4f99-876f-1ffc72b01509\") " Nov 24 02:11:08 crc kubenswrapper[4755]: I1124 02:11:08.390540 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/debc1ed6-04be-4f99-876f-1ffc72b01509-catalog-content\") pod \"debc1ed6-04be-4f99-876f-1ffc72b01509\" (UID: \"debc1ed6-04be-4f99-876f-1ffc72b01509\") " Nov 24 02:11:08 crc kubenswrapper[4755]: I1124 02:11:08.391292 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/debc1ed6-04be-4f99-876f-1ffc72b01509-utilities" (OuterVolumeSpecName: "utilities") pod "debc1ed6-04be-4f99-876f-1ffc72b01509" (UID: "debc1ed6-04be-4f99-876f-1ffc72b01509"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 02:11:08 crc kubenswrapper[4755]: I1124 02:11:08.396539 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/debc1ed6-04be-4f99-876f-1ffc72b01509-kube-api-access-lb8cb" (OuterVolumeSpecName: "kube-api-access-lb8cb") pod "debc1ed6-04be-4f99-876f-1ffc72b01509" (UID: "debc1ed6-04be-4f99-876f-1ffc72b01509"). InnerVolumeSpecName "kube-api-access-lb8cb". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 02:11:08 crc kubenswrapper[4755]: I1124 02:11:08.483477 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/debc1ed6-04be-4f99-876f-1ffc72b01509-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "debc1ed6-04be-4f99-876f-1ffc72b01509" (UID: "debc1ed6-04be-4f99-876f-1ffc72b01509"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 02:11:08 crc kubenswrapper[4755]: I1124 02:11:08.493309 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/debc1ed6-04be-4f99-876f-1ffc72b01509-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 02:11:08 crc kubenswrapper[4755]: I1124 02:11:08.493331 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/debc1ed6-04be-4f99-876f-1ffc72b01509-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 02:11:08 crc kubenswrapper[4755]: I1124 02:11:08.493343 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lb8cb\" (UniqueName: \"kubernetes.io/projected/debc1ed6-04be-4f99-876f-1ffc72b01509-kube-api-access-lb8cb\") on node \"crc\" DevicePath \"\"" Nov 24 02:11:08 crc kubenswrapper[4755]: I1124 02:11:08.719652 4755 generic.go:334] "Generic (PLEG): container finished" podID="debc1ed6-04be-4f99-876f-1ffc72b01509" containerID="294521d0fdf771b60fd27823630a65c0a314784ccd14a2f996337741671270c9" exitCode=0 Nov 24 02:11:08 crc kubenswrapper[4755]: I1124 02:11:08.719695 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zxjbj" event={"ID":"debc1ed6-04be-4f99-876f-1ffc72b01509","Type":"ContainerDied","Data":"294521d0fdf771b60fd27823630a65c0a314784ccd14a2f996337741671270c9"} Nov 24 02:11:08 crc kubenswrapper[4755]: I1124 02:11:08.719721 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zxjbj" event={"ID":"debc1ed6-04be-4f99-876f-1ffc72b01509","Type":"ContainerDied","Data":"6c6fff2eac5f234263419ad407d7a3813196689704482f32659c557a827997fa"} Nov 24 02:11:08 crc kubenswrapper[4755]: I1124 02:11:08.719738 4755 scope.go:117] "RemoveContainer" containerID="294521d0fdf771b60fd27823630a65c0a314784ccd14a2f996337741671270c9" Nov 24 02:11:08 crc kubenswrapper[4755]: I1124 02:11:08.719754 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-zxjbj" Nov 24 02:11:08 crc kubenswrapper[4755]: I1124 02:11:08.756820 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zxjbj"] Nov 24 02:11:08 crc kubenswrapper[4755]: I1124 02:11:08.763476 4755 scope.go:117] "RemoveContainer" containerID="ae7e7be77b335eb1600debbb3062a1351c5d56726f4188029b5d4cf0e96d594a" Nov 24 02:11:08 crc kubenswrapper[4755]: I1124 02:11:08.779114 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-zxjbj"] Nov 24 02:11:08 crc kubenswrapper[4755]: I1124 02:11:08.806893 4755 scope.go:117] "RemoveContainer" containerID="a82f44597cd3f2ffa014318300d217dd62beaaba6828adbe147bb657b287fa30" Nov 24 02:11:08 crc kubenswrapper[4755]: I1124 02:11:08.859185 4755 scope.go:117] "RemoveContainer" containerID="294521d0fdf771b60fd27823630a65c0a314784ccd14a2f996337741671270c9" Nov 24 02:11:08 crc kubenswrapper[4755]: E1124 02:11:08.859698 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"294521d0fdf771b60fd27823630a65c0a314784ccd14a2f996337741671270c9\": container with ID starting with 294521d0fdf771b60fd27823630a65c0a314784ccd14a2f996337741671270c9 not found: ID does not exist" containerID="294521d0fdf771b60fd27823630a65c0a314784ccd14a2f996337741671270c9" Nov 24 02:11:08 crc kubenswrapper[4755]: I1124 02:11:08.859745 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"294521d0fdf771b60fd27823630a65c0a314784ccd14a2f996337741671270c9"} err="failed to get container status \"294521d0fdf771b60fd27823630a65c0a314784ccd14a2f996337741671270c9\": rpc error: code = NotFound desc = could not find container \"294521d0fdf771b60fd27823630a65c0a314784ccd14a2f996337741671270c9\": container with ID starting with 294521d0fdf771b60fd27823630a65c0a314784ccd14a2f996337741671270c9 not found: ID does not exist" Nov 24 02:11:08 crc kubenswrapper[4755]: I1124 02:11:08.859773 4755 scope.go:117] "RemoveContainer" containerID="ae7e7be77b335eb1600debbb3062a1351c5d56726f4188029b5d4cf0e96d594a" Nov 24 02:11:08 crc kubenswrapper[4755]: E1124 02:11:08.860144 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae7e7be77b335eb1600debbb3062a1351c5d56726f4188029b5d4cf0e96d594a\": container with ID starting with ae7e7be77b335eb1600debbb3062a1351c5d56726f4188029b5d4cf0e96d594a not found: ID does not exist" containerID="ae7e7be77b335eb1600debbb3062a1351c5d56726f4188029b5d4cf0e96d594a" Nov 24 02:11:08 crc kubenswrapper[4755]: I1124 02:11:08.860167 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae7e7be77b335eb1600debbb3062a1351c5d56726f4188029b5d4cf0e96d594a"} err="failed to get container status \"ae7e7be77b335eb1600debbb3062a1351c5d56726f4188029b5d4cf0e96d594a\": rpc error: code = NotFound desc = could not find container \"ae7e7be77b335eb1600debbb3062a1351c5d56726f4188029b5d4cf0e96d594a\": container with ID starting with ae7e7be77b335eb1600debbb3062a1351c5d56726f4188029b5d4cf0e96d594a not found: ID does not exist" Nov 24 02:11:08 crc kubenswrapper[4755]: I1124 02:11:08.860183 4755 scope.go:117] "RemoveContainer" containerID="a82f44597cd3f2ffa014318300d217dd62beaaba6828adbe147bb657b287fa30" Nov 24 02:11:08 crc kubenswrapper[4755]: E1124 02:11:08.860359 4755 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"a82f44597cd3f2ffa014318300d217dd62beaaba6828adbe147bb657b287fa30\": container with ID starting with a82f44597cd3f2ffa014318300d217dd62beaaba6828adbe147bb657b287fa30 not found: ID does not exist" containerID="a82f44597cd3f2ffa014318300d217dd62beaaba6828adbe147bb657b287fa30" Nov 24 02:11:08 crc kubenswrapper[4755]: I1124 02:11:08.860379 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a82f44597cd3f2ffa014318300d217dd62beaaba6828adbe147bb657b287fa30"} err="failed to get container status \"a82f44597cd3f2ffa014318300d217dd62beaaba6828adbe147bb657b287fa30\": rpc error: code = NotFound desc = could not find container \"a82f44597cd3f2ffa014318300d217dd62beaaba6828adbe147bb657b287fa30\": container with ID starting with a82f44597cd3f2ffa014318300d217dd62beaaba6828adbe147bb657b287fa30 not found: ID does not exist" Nov 24 02:11:10 crc kubenswrapper[4755]: I1124 02:11:10.024117 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="debc1ed6-04be-4f99-876f-1ffc72b01509" path="/var/lib/kubelet/pods/debc1ed6-04be-4f99-876f-1ffc72b01509/volumes" Nov 24 02:12:03 crc kubenswrapper[4755]: I1124 02:12:03.295665 4755 patch_prober.go:28] interesting pod/machine-config-daemon-h8xzm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 02:12:03 crc kubenswrapper[4755]: I1124 02:12:03.296240 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 02:12:08 crc kubenswrapper[4755]: I1124 02:12:08.297986 4755 generic.go:334] "Generic (PLEG): container finished" podID="a93b8fb8-f220-4eac-9433-73cbeca1a486" containerID="84f2c3e3e4177436c2b3632ddeeeef8080c02940c1e49c3be2ba560d41a1c403" exitCode=0 Nov 24 02:12:08 crc kubenswrapper[4755]: I1124 02:12:08.298081 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zx8hf/must-gather-dnqqd" event={"ID":"a93b8fb8-f220-4eac-9433-73cbeca1a486","Type":"ContainerDied","Data":"84f2c3e3e4177436c2b3632ddeeeef8080c02940c1e49c3be2ba560d41a1c403"} Nov 24 02:12:08 crc kubenswrapper[4755]: I1124 02:12:08.299298 4755 scope.go:117] "RemoveContainer" containerID="84f2c3e3e4177436c2b3632ddeeeef8080c02940c1e49c3be2ba560d41a1c403" Nov 24 02:12:08 crc kubenswrapper[4755]: I1124 02:12:08.818860 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-zx8hf_must-gather-dnqqd_a93b8fb8-f220-4eac-9433-73cbeca1a486/gather/0.log" Nov 24 02:12:16 crc kubenswrapper[4755]: I1124 02:12:16.348143 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-zx8hf/must-gather-dnqqd"] Nov 24 02:12:16 crc kubenswrapper[4755]: I1124 02:12:16.350643 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-zx8hf/must-gather-dnqqd" podUID="a93b8fb8-f220-4eac-9433-73cbeca1a486" containerName="copy" containerID="cri-o://af5be07fe1b44c929a620abbdc5d6d042e3b5e3cccf4edbea1362746d88ec039" gracePeriod=2 Nov 24 02:12:16 crc kubenswrapper[4755]: I1124 02:12:16.356626 4755 kubelet.go:2431] "SyncLoop REMOVE" 
source="api" pods=["openshift-must-gather-zx8hf/must-gather-dnqqd"] Nov 24 02:12:16 crc kubenswrapper[4755]: I1124 02:12:16.789131 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-zx8hf_must-gather-dnqqd_a93b8fb8-f220-4eac-9433-73cbeca1a486/copy/0.log" Nov 24 02:12:16 crc kubenswrapper[4755]: I1124 02:12:16.789987 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zx8hf/must-gather-dnqqd" Nov 24 02:12:16 crc kubenswrapper[4755]: I1124 02:12:16.901933 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a93b8fb8-f220-4eac-9433-73cbeca1a486-must-gather-output\") pod \"a93b8fb8-f220-4eac-9433-73cbeca1a486\" (UID: \"a93b8fb8-f220-4eac-9433-73cbeca1a486\") " Nov 24 02:12:16 crc kubenswrapper[4755]: I1124 02:12:16.902140 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ljnt\" (UniqueName: \"kubernetes.io/projected/a93b8fb8-f220-4eac-9433-73cbeca1a486-kube-api-access-6ljnt\") pod \"a93b8fb8-f220-4eac-9433-73cbeca1a486\" (UID: \"a93b8fb8-f220-4eac-9433-73cbeca1a486\") " Nov 24 02:12:16 crc kubenswrapper[4755]: I1124 02:12:16.908103 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a93b8fb8-f220-4eac-9433-73cbeca1a486-kube-api-access-6ljnt" (OuterVolumeSpecName: "kube-api-access-6ljnt") pod "a93b8fb8-f220-4eac-9433-73cbeca1a486" (UID: "a93b8fb8-f220-4eac-9433-73cbeca1a486"). InnerVolumeSpecName "kube-api-access-6ljnt". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 02:12:17 crc kubenswrapper[4755]: I1124 02:12:17.004465 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ljnt\" (UniqueName: \"kubernetes.io/projected/a93b8fb8-f220-4eac-9433-73cbeca1a486-kube-api-access-6ljnt\") on node \"crc\" DevicePath \"\"" Nov 24 02:12:17 crc kubenswrapper[4755]: I1124 02:12:17.044553 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a93b8fb8-f220-4eac-9433-73cbeca1a486-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "a93b8fb8-f220-4eac-9433-73cbeca1a486" (UID: "a93b8fb8-f220-4eac-9433-73cbeca1a486"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 02:12:17 crc kubenswrapper[4755]: I1124 02:12:17.108043 4755 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a93b8fb8-f220-4eac-9433-73cbeca1a486-must-gather-output\") on node \"crc\" DevicePath \"\"" Nov 24 02:12:17 crc kubenswrapper[4755]: I1124 02:12:17.390291 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-zx8hf_must-gather-dnqqd_a93b8fb8-f220-4eac-9433-73cbeca1a486/copy/0.log" Nov 24 02:12:17 crc kubenswrapper[4755]: I1124 02:12:17.390897 4755 generic.go:334] "Generic (PLEG): container finished" podID="a93b8fb8-f220-4eac-9433-73cbeca1a486" containerID="af5be07fe1b44c929a620abbdc5d6d042e3b5e3cccf4edbea1362746d88ec039" exitCode=143 Nov 24 02:12:17 crc kubenswrapper[4755]: I1124 02:12:17.390973 4755 scope.go:117] "RemoveContainer" containerID="af5be07fe1b44c929a620abbdc5d6d042e3b5e3cccf4edbea1362746d88ec039" Nov 24 02:12:17 crc kubenswrapper[4755]: I1124 02:12:17.390973 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-zx8hf/must-gather-dnqqd" Nov 24 02:12:17 crc kubenswrapper[4755]: I1124 02:12:17.413296 4755 scope.go:117] "RemoveContainer" containerID="84f2c3e3e4177436c2b3632ddeeeef8080c02940c1e49c3be2ba560d41a1c403" Nov 24 02:12:17 crc kubenswrapper[4755]: I1124 02:12:17.466010 4755 scope.go:117] "RemoveContainer" containerID="af5be07fe1b44c929a620abbdc5d6d042e3b5e3cccf4edbea1362746d88ec039" Nov 24 02:12:17 crc kubenswrapper[4755]: E1124 02:12:17.466518 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"af5be07fe1b44c929a620abbdc5d6d042e3b5e3cccf4edbea1362746d88ec039\": container with ID starting with af5be07fe1b44c929a620abbdc5d6d042e3b5e3cccf4edbea1362746d88ec039 not found: ID does not exist" containerID="af5be07fe1b44c929a620abbdc5d6d042e3b5e3cccf4edbea1362746d88ec039" Nov 24 02:12:17 crc kubenswrapper[4755]: I1124 02:12:17.466567 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"af5be07fe1b44c929a620abbdc5d6d042e3b5e3cccf4edbea1362746d88ec039"} err="failed to get container status \"af5be07fe1b44c929a620abbdc5d6d042e3b5e3cccf4edbea1362746d88ec039\": rpc error: code = NotFound desc = could not find container \"af5be07fe1b44c929a620abbdc5d6d042e3b5e3cccf4edbea1362746d88ec039\": container with ID starting with af5be07fe1b44c929a620abbdc5d6d042e3b5e3cccf4edbea1362746d88ec039 not found: ID does not exist" Nov 24 02:12:17 crc kubenswrapper[4755]: I1124 02:12:17.466593 4755 scope.go:117] "RemoveContainer" containerID="84f2c3e3e4177436c2b3632ddeeeef8080c02940c1e49c3be2ba560d41a1c403" Nov 24 02:12:17 crc kubenswrapper[4755]: E1124 02:12:17.466836 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"84f2c3e3e4177436c2b3632ddeeeef8080c02940c1e49c3be2ba560d41a1c403\": container with ID starting with 84f2c3e3e4177436c2b3632ddeeeef8080c02940c1e49c3be2ba560d41a1c403 not found: ID does not exist" containerID="84f2c3e3e4177436c2b3632ddeeeef8080c02940c1e49c3be2ba560d41a1c403" Nov 24 02:12:17 crc kubenswrapper[4755]: I1124 02:12:17.466862 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84f2c3e3e4177436c2b3632ddeeeef8080c02940c1e49c3be2ba560d41a1c403"} err="failed to get container status \"84f2c3e3e4177436c2b3632ddeeeef8080c02940c1e49c3be2ba560d41a1c403\": rpc error: code = NotFound desc = could not find container \"84f2c3e3e4177436c2b3632ddeeeef8080c02940c1e49c3be2ba560d41a1c403\": container with ID starting with 84f2c3e3e4177436c2b3632ddeeeef8080c02940c1e49c3be2ba560d41a1c403 not found: ID does not exist" Nov 24 02:12:18 crc kubenswrapper[4755]: I1124 02:12:18.006765 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a93b8fb8-f220-4eac-9433-73cbeca1a486" path="/var/lib/kubelet/pods/a93b8fb8-f220-4eac-9433-73cbeca1a486/volumes" Nov 24 02:12:33 crc kubenswrapper[4755]: I1124 02:12:33.295152 4755 patch_prober.go:28] interesting pod/machine-config-daemon-h8xzm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 02:12:33 crc kubenswrapper[4755]: I1124 02:12:33.295710 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" 
podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 02:13:03 crc kubenswrapper[4755]: I1124 02:13:03.295407 4755 patch_prober.go:28] interesting pod/machine-config-daemon-h8xzm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 02:13:03 crc kubenswrapper[4755]: I1124 02:13:03.296159 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 02:13:03 crc kubenswrapper[4755]: I1124 02:13:03.296226 4755 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" Nov 24 02:13:03 crc kubenswrapper[4755]: I1124 02:13:03.297234 4755 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"fff609c366ec815c32e40db1bbafb7f7be4ae091a2824075c9d486c88017b349"} pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 24 02:13:03 crc kubenswrapper[4755]: I1124 02:13:03.297314 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" containerID="cri-o://fff609c366ec815c32e40db1bbafb7f7be4ae091a2824075c9d486c88017b349" gracePeriod=600 Nov 24 02:13:03 crc kubenswrapper[4755]: E1124 02:13:03.440777 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:13:03 crc kubenswrapper[4755]: I1124 02:13:03.819508 4755 generic.go:334] "Generic (PLEG): container finished" podID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerID="fff609c366ec815c32e40db1bbafb7f7be4ae091a2824075c9d486c88017b349" exitCode=0 Nov 24 02:13:03 crc kubenswrapper[4755]: I1124 02:13:03.819558 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" event={"ID":"b1962128-02a0-46c3-82c2-5055c2aed0b9","Type":"ContainerDied","Data":"fff609c366ec815c32e40db1bbafb7f7be4ae091a2824075c9d486c88017b349"} Nov 24 02:13:03 crc kubenswrapper[4755]: I1124 02:13:03.819830 4755 scope.go:117] "RemoveContainer" containerID="8d58e541d2b2ce30c0dc08c9a1783324e6dc27150018b18022e3de27e7efa390" Nov 24 02:13:03 crc kubenswrapper[4755]: I1124 02:13:03.820453 4755 scope.go:117] "RemoveContainer" containerID="fff609c366ec815c32e40db1bbafb7f7be4ae091a2824075c9d486c88017b349" Nov 24 02:13:03 crc kubenswrapper[4755]: E1124 02:13:03.820692 4755 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:13:16 crc kubenswrapper[4755]: I1124 02:13:16.996444 4755 scope.go:117] "RemoveContainer" containerID="fff609c366ec815c32e40db1bbafb7f7be4ae091a2824075c9d486c88017b349" Nov 24 02:13:16 crc kubenswrapper[4755]: E1124 02:13:16.997437 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:13:29 crc kubenswrapper[4755]: I1124 02:13:29.996292 4755 scope.go:117] "RemoveContainer" containerID="fff609c366ec815c32e40db1bbafb7f7be4ae091a2824075c9d486c88017b349" Nov 24 02:13:29 crc kubenswrapper[4755]: E1124 02:13:29.997166 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:13:42 crc kubenswrapper[4755]: I1124 02:13:42.996995 4755 scope.go:117] "RemoveContainer" containerID="fff609c366ec815c32e40db1bbafb7f7be4ae091a2824075c9d486c88017b349" Nov 24 02:13:42 crc kubenswrapper[4755]: E1124 02:13:42.999124 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:13:57 crc kubenswrapper[4755]: I1124 02:13:57.996569 4755 scope.go:117] "RemoveContainer" containerID="fff609c366ec815c32e40db1bbafb7f7be4ae091a2824075c9d486c88017b349" Nov 24 02:13:57 crc kubenswrapper[4755]: E1124 02:13:57.997234 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:14:11 crc kubenswrapper[4755]: I1124 02:14:11.997061 4755 scope.go:117] "RemoveContainer" containerID="fff609c366ec815c32e40db1bbafb7f7be4ae091a2824075c9d486c88017b349" Nov 24 02:14:11 crc kubenswrapper[4755]: E1124 02:14:11.997653 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:14:26 crc kubenswrapper[4755]: I1124 02:14:26.997057 4755 scope.go:117] "RemoveContainer" containerID="fff609c366ec815c32e40db1bbafb7f7be4ae091a2824075c9d486c88017b349" Nov 24 02:14:26 crc kubenswrapper[4755]: E1124 02:14:26.997782 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:14:39 crc kubenswrapper[4755]: I1124 02:14:39.997054 4755 scope.go:117] "RemoveContainer" containerID="fff609c366ec815c32e40db1bbafb7f7be4ae091a2824075c9d486c88017b349" Nov 24 02:14:39 crc kubenswrapper[4755]: E1124 02:14:39.998354 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:14:54 crc kubenswrapper[4755]: I1124 02:14:54.997058 4755 scope.go:117] "RemoveContainer" containerID="fff609c366ec815c32e40db1bbafb7f7be4ae091a2824075c9d486c88017b349" Nov 24 02:14:54 crc kubenswrapper[4755]: E1124 02:14:54.998429 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:14:57 crc kubenswrapper[4755]: I1124 02:14:57.409384 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-25x59/must-gather-r7rxm"] Nov 24 02:14:57 crc kubenswrapper[4755]: E1124 02:14:57.410168 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="debc1ed6-04be-4f99-876f-1ffc72b01509" containerName="extract-utilities" Nov 24 02:14:57 crc kubenswrapper[4755]: I1124 02:14:57.410186 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="debc1ed6-04be-4f99-876f-1ffc72b01509" containerName="extract-utilities" Nov 24 02:14:57 crc kubenswrapper[4755]: E1124 02:14:57.410206 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a93b8fb8-f220-4eac-9433-73cbeca1a486" containerName="copy" Nov 24 02:14:57 crc kubenswrapper[4755]: I1124 02:14:57.410214 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="a93b8fb8-f220-4eac-9433-73cbeca1a486" containerName="copy" Nov 24 02:14:57 crc kubenswrapper[4755]: E1124 02:14:57.410230 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="debc1ed6-04be-4f99-876f-1ffc72b01509" containerName="extract-content" Nov 24 02:14:57 crc kubenswrapper[4755]: I1124 02:14:57.410238 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="debc1ed6-04be-4f99-876f-1ffc72b01509" 
containerName="extract-content" Nov 24 02:14:57 crc kubenswrapper[4755]: E1124 02:14:57.410273 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a93b8fb8-f220-4eac-9433-73cbeca1a486" containerName="gather" Nov 24 02:14:57 crc kubenswrapper[4755]: I1124 02:14:57.410281 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="a93b8fb8-f220-4eac-9433-73cbeca1a486" containerName="gather" Nov 24 02:14:57 crc kubenswrapper[4755]: E1124 02:14:57.410306 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="debc1ed6-04be-4f99-876f-1ffc72b01509" containerName="registry-server" Nov 24 02:14:57 crc kubenswrapper[4755]: I1124 02:14:57.410314 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="debc1ed6-04be-4f99-876f-1ffc72b01509" containerName="registry-server" Nov 24 02:14:57 crc kubenswrapper[4755]: I1124 02:14:57.410525 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="a93b8fb8-f220-4eac-9433-73cbeca1a486" containerName="gather" Nov 24 02:14:57 crc kubenswrapper[4755]: I1124 02:14:57.410540 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="a93b8fb8-f220-4eac-9433-73cbeca1a486" containerName="copy" Nov 24 02:14:57 crc kubenswrapper[4755]: I1124 02:14:57.410556 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="debc1ed6-04be-4f99-876f-1ffc72b01509" containerName="registry-server" Nov 24 02:14:57 crc kubenswrapper[4755]: I1124 02:14:57.411762 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-25x59/must-gather-r7rxm" Nov 24 02:14:57 crc kubenswrapper[4755]: I1124 02:14:57.414133 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-25x59"/"kube-root-ca.crt" Nov 24 02:14:57 crc kubenswrapper[4755]: I1124 02:14:57.414395 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-25x59"/"openshift-service-ca.crt" Nov 24 02:14:57 crc kubenswrapper[4755]: I1124 02:14:57.414540 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-25x59"/"default-dockercfg-lfh8c" Nov 24 02:14:57 crc kubenswrapper[4755]: I1124 02:14:57.419596 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-25x59/must-gather-r7rxm"] Nov 24 02:14:57 crc kubenswrapper[4755]: I1124 02:14:57.536645 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/811c205f-798a-47ad-9b10-3fc501a6b9f6-must-gather-output\") pod \"must-gather-r7rxm\" (UID: \"811c205f-798a-47ad-9b10-3fc501a6b9f6\") " pod="openshift-must-gather-25x59/must-gather-r7rxm" Nov 24 02:14:57 crc kubenswrapper[4755]: I1124 02:14:57.536713 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d2q5p\" (UniqueName: \"kubernetes.io/projected/811c205f-798a-47ad-9b10-3fc501a6b9f6-kube-api-access-d2q5p\") pod \"must-gather-r7rxm\" (UID: \"811c205f-798a-47ad-9b10-3fc501a6b9f6\") " pod="openshift-must-gather-25x59/must-gather-r7rxm" Nov 24 02:14:57 crc kubenswrapper[4755]: I1124 02:14:57.638331 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/811c205f-798a-47ad-9b10-3fc501a6b9f6-must-gather-output\") pod \"must-gather-r7rxm\" (UID: \"811c205f-798a-47ad-9b10-3fc501a6b9f6\") " pod="openshift-must-gather-25x59/must-gather-r7rxm" Nov 24 
02:14:57 crc kubenswrapper[4755]: I1124 02:14:57.638408 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d2q5p\" (UniqueName: \"kubernetes.io/projected/811c205f-798a-47ad-9b10-3fc501a6b9f6-kube-api-access-d2q5p\") pod \"must-gather-r7rxm\" (UID: \"811c205f-798a-47ad-9b10-3fc501a6b9f6\") " pod="openshift-must-gather-25x59/must-gather-r7rxm" Nov 24 02:14:57 crc kubenswrapper[4755]: I1124 02:14:57.638816 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/811c205f-798a-47ad-9b10-3fc501a6b9f6-must-gather-output\") pod \"must-gather-r7rxm\" (UID: \"811c205f-798a-47ad-9b10-3fc501a6b9f6\") " pod="openshift-must-gather-25x59/must-gather-r7rxm" Nov 24 02:14:57 crc kubenswrapper[4755]: I1124 02:14:57.654757 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d2q5p\" (UniqueName: \"kubernetes.io/projected/811c205f-798a-47ad-9b10-3fc501a6b9f6-kube-api-access-d2q5p\") pod \"must-gather-r7rxm\" (UID: \"811c205f-798a-47ad-9b10-3fc501a6b9f6\") " pod="openshift-must-gather-25x59/must-gather-r7rxm" Nov 24 02:14:57 crc kubenswrapper[4755]: I1124 02:14:57.727621 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-25x59/must-gather-r7rxm" Nov 24 02:14:58 crc kubenswrapper[4755]: I1124 02:14:58.265794 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-25x59/must-gather-r7rxm"] Nov 24 02:14:59 crc kubenswrapper[4755]: I1124 02:14:59.106732 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-25x59/must-gather-r7rxm" event={"ID":"811c205f-798a-47ad-9b10-3fc501a6b9f6","Type":"ContainerStarted","Data":"1ab254cd4019c07e88554a840170603a14c9a314d32b668bbceedef1f81a5c9d"} Nov 24 02:14:59 crc kubenswrapper[4755]: I1124 02:14:59.107133 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-25x59/must-gather-r7rxm" event={"ID":"811c205f-798a-47ad-9b10-3fc501a6b9f6","Type":"ContainerStarted","Data":"3e4ca36a298490ae8feb66d41e99713f3ae9eeed22da9ab1612e3ee12d2ffd29"} Nov 24 02:14:59 crc kubenswrapper[4755]: I1124 02:14:59.107150 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-25x59/must-gather-r7rxm" event={"ID":"811c205f-798a-47ad-9b10-3fc501a6b9f6","Type":"ContainerStarted","Data":"ca1145c4d3e93782c627eaa12683f645fc09ffc17914d6eec9f1106134b3b6f4"} Nov 24 02:14:59 crc kubenswrapper[4755]: I1124 02:14:59.142229 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-25x59/must-gather-r7rxm" podStartSLOduration=2.142200234 podStartE2EDuration="2.142200234s" podCreationTimestamp="2025-11-24 02:14:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 02:14:59.12825662 +0000 UTC m=+3723.814322151" watchObservedRunningTime="2025-11-24 02:14:59.142200234 +0000 UTC m=+3723.828265775" Nov 24 02:15:00 crc kubenswrapper[4755]: I1124 02:15:00.205838 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29399175-tn7m7"] Nov 24 02:15:00 crc kubenswrapper[4755]: I1124 02:15:00.211998 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29399175-tn7m7" Nov 24 02:15:00 crc kubenswrapper[4755]: I1124 02:15:00.225635 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 24 02:15:00 crc kubenswrapper[4755]: I1124 02:15:00.225813 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29399175-tn7m7"] Nov 24 02:15:00 crc kubenswrapper[4755]: I1124 02:15:00.225923 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 24 02:15:00 crc kubenswrapper[4755]: I1124 02:15:00.300326 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5f3db3f8-8b7e-4b5b-97b4-9bfdbb4fa28e-secret-volume\") pod \"collect-profiles-29399175-tn7m7\" (UID: \"5f3db3f8-8b7e-4b5b-97b4-9bfdbb4fa28e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399175-tn7m7" Nov 24 02:15:00 crc kubenswrapper[4755]: I1124 02:15:00.300401 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5f3db3f8-8b7e-4b5b-97b4-9bfdbb4fa28e-config-volume\") pod \"collect-profiles-29399175-tn7m7\" (UID: \"5f3db3f8-8b7e-4b5b-97b4-9bfdbb4fa28e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399175-tn7m7" Nov 24 02:15:00 crc kubenswrapper[4755]: I1124 02:15:00.300424 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wt42w\" (UniqueName: \"kubernetes.io/projected/5f3db3f8-8b7e-4b5b-97b4-9bfdbb4fa28e-kube-api-access-wt42w\") pod \"collect-profiles-29399175-tn7m7\" (UID: \"5f3db3f8-8b7e-4b5b-97b4-9bfdbb4fa28e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399175-tn7m7" Nov 24 02:15:00 crc kubenswrapper[4755]: I1124 02:15:00.401710 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5f3db3f8-8b7e-4b5b-97b4-9bfdbb4fa28e-secret-volume\") pod \"collect-profiles-29399175-tn7m7\" (UID: \"5f3db3f8-8b7e-4b5b-97b4-9bfdbb4fa28e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399175-tn7m7" Nov 24 02:15:00 crc kubenswrapper[4755]: I1124 02:15:00.402026 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5f3db3f8-8b7e-4b5b-97b4-9bfdbb4fa28e-config-volume\") pod \"collect-profiles-29399175-tn7m7\" (UID: \"5f3db3f8-8b7e-4b5b-97b4-9bfdbb4fa28e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399175-tn7m7" Nov 24 02:15:00 crc kubenswrapper[4755]: I1124 02:15:00.402239 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wt42w\" (UniqueName: \"kubernetes.io/projected/5f3db3f8-8b7e-4b5b-97b4-9bfdbb4fa28e-kube-api-access-wt42w\") pod \"collect-profiles-29399175-tn7m7\" (UID: \"5f3db3f8-8b7e-4b5b-97b4-9bfdbb4fa28e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399175-tn7m7" Nov 24 02:15:00 crc kubenswrapper[4755]: I1124 02:15:00.403002 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5f3db3f8-8b7e-4b5b-97b4-9bfdbb4fa28e-config-volume\") pod 
\"collect-profiles-29399175-tn7m7\" (UID: \"5f3db3f8-8b7e-4b5b-97b4-9bfdbb4fa28e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399175-tn7m7" Nov 24 02:15:00 crc kubenswrapper[4755]: I1124 02:15:00.423423 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5f3db3f8-8b7e-4b5b-97b4-9bfdbb4fa28e-secret-volume\") pod \"collect-profiles-29399175-tn7m7\" (UID: \"5f3db3f8-8b7e-4b5b-97b4-9bfdbb4fa28e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399175-tn7m7" Nov 24 02:15:00 crc kubenswrapper[4755]: I1124 02:15:00.426729 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wt42w\" (UniqueName: \"kubernetes.io/projected/5f3db3f8-8b7e-4b5b-97b4-9bfdbb4fa28e-kube-api-access-wt42w\") pod \"collect-profiles-29399175-tn7m7\" (UID: \"5f3db3f8-8b7e-4b5b-97b4-9bfdbb4fa28e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399175-tn7m7" Nov 24 02:15:00 crc kubenswrapper[4755]: I1124 02:15:00.556820 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29399175-tn7m7" Nov 24 02:15:00 crc kubenswrapper[4755]: I1124 02:15:00.831414 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29399175-tn7m7"] Nov 24 02:15:01 crc kubenswrapper[4755]: I1124 02:15:01.126168 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29399175-tn7m7" event={"ID":"5f3db3f8-8b7e-4b5b-97b4-9bfdbb4fa28e","Type":"ContainerStarted","Data":"4e9fca05214fa7e61ea938908f7822dc05c779cc4dc83357b688c2a711abfb3b"} Nov 24 02:15:01 crc kubenswrapper[4755]: I1124 02:15:01.126212 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29399175-tn7m7" event={"ID":"5f3db3f8-8b7e-4b5b-97b4-9bfdbb4fa28e","Type":"ContainerStarted","Data":"afedc12486691e68d7c3eb5e38e83a6fb7dbf6d94955e98be91d6baf02906a2a"} Nov 24 02:15:01 crc kubenswrapper[4755]: I1124 02:15:01.158425 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29399175-tn7m7" podStartSLOduration=1.158405546 podStartE2EDuration="1.158405546s" podCreationTimestamp="2025-11-24 02:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 02:15:01.150337618 +0000 UTC m=+3725.836403139" watchObservedRunningTime="2025-11-24 02:15:01.158405546 +0000 UTC m=+3725.844471047" Nov 24 02:15:02 crc kubenswrapper[4755]: I1124 02:15:02.137700 4755 generic.go:334] "Generic (PLEG): container finished" podID="5f3db3f8-8b7e-4b5b-97b4-9bfdbb4fa28e" containerID="4e9fca05214fa7e61ea938908f7822dc05c779cc4dc83357b688c2a711abfb3b" exitCode=0 Nov 24 02:15:02 crc kubenswrapper[4755]: I1124 02:15:02.137773 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29399175-tn7m7" event={"ID":"5f3db3f8-8b7e-4b5b-97b4-9bfdbb4fa28e","Type":"ContainerDied","Data":"4e9fca05214fa7e61ea938908f7822dc05c779cc4dc83357b688c2a711abfb3b"} Nov 24 02:15:02 crc kubenswrapper[4755]: I1124 02:15:02.237363 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-25x59/crc-debug-mlp8v"] Nov 24 02:15:02 crc kubenswrapper[4755]: I1124 02:15:02.238592 4755 util.go:30] "No sandbox 
for pod can be found. Need to start a new one" pod="openshift-must-gather-25x59/crc-debug-mlp8v" Nov 24 02:15:02 crc kubenswrapper[4755]: I1124 02:15:02.335585 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-74876\" (UniqueName: \"kubernetes.io/projected/1a6cfa24-a493-4143-bee3-f46ed7b4fd1f-kube-api-access-74876\") pod \"crc-debug-mlp8v\" (UID: \"1a6cfa24-a493-4143-bee3-f46ed7b4fd1f\") " pod="openshift-must-gather-25x59/crc-debug-mlp8v" Nov 24 02:15:02 crc kubenswrapper[4755]: I1124 02:15:02.335752 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1a6cfa24-a493-4143-bee3-f46ed7b4fd1f-host\") pod \"crc-debug-mlp8v\" (UID: \"1a6cfa24-a493-4143-bee3-f46ed7b4fd1f\") " pod="openshift-must-gather-25x59/crc-debug-mlp8v" Nov 24 02:15:02 crc kubenswrapper[4755]: I1124 02:15:02.437956 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1a6cfa24-a493-4143-bee3-f46ed7b4fd1f-host\") pod \"crc-debug-mlp8v\" (UID: \"1a6cfa24-a493-4143-bee3-f46ed7b4fd1f\") " pod="openshift-must-gather-25x59/crc-debug-mlp8v" Nov 24 02:15:02 crc kubenswrapper[4755]: I1124 02:15:02.438173 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1a6cfa24-a493-4143-bee3-f46ed7b4fd1f-host\") pod \"crc-debug-mlp8v\" (UID: \"1a6cfa24-a493-4143-bee3-f46ed7b4fd1f\") " pod="openshift-must-gather-25x59/crc-debug-mlp8v" Nov 24 02:15:02 crc kubenswrapper[4755]: I1124 02:15:02.438208 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-74876\" (UniqueName: \"kubernetes.io/projected/1a6cfa24-a493-4143-bee3-f46ed7b4fd1f-kube-api-access-74876\") pod \"crc-debug-mlp8v\" (UID: \"1a6cfa24-a493-4143-bee3-f46ed7b4fd1f\") " pod="openshift-must-gather-25x59/crc-debug-mlp8v" Nov 24 02:15:02 crc kubenswrapper[4755]: I1124 02:15:02.473573 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-74876\" (UniqueName: \"kubernetes.io/projected/1a6cfa24-a493-4143-bee3-f46ed7b4fd1f-kube-api-access-74876\") pod \"crc-debug-mlp8v\" (UID: \"1a6cfa24-a493-4143-bee3-f46ed7b4fd1f\") " pod="openshift-must-gather-25x59/crc-debug-mlp8v" Nov 24 02:15:02 crc kubenswrapper[4755]: I1124 02:15:02.558342 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-25x59/crc-debug-mlp8v" Nov 24 02:15:02 crc kubenswrapper[4755]: W1124 02:15:02.617733 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1a6cfa24_a493_4143_bee3_f46ed7b4fd1f.slice/crio-aa64542bcc827c47c6bc503530e7d49d1b2577cba7044490cd968cac3dd833c8 WatchSource:0}: Error finding container aa64542bcc827c47c6bc503530e7d49d1b2577cba7044490cd968cac3dd833c8: Status 404 returned error can't find the container with id aa64542bcc827c47c6bc503530e7d49d1b2577cba7044490cd968cac3dd833c8 Nov 24 02:15:03 crc kubenswrapper[4755]: I1124 02:15:03.154720 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-25x59/crc-debug-mlp8v" event={"ID":"1a6cfa24-a493-4143-bee3-f46ed7b4fd1f","Type":"ContainerStarted","Data":"26af8c08ca463596685a1bc59fa2fa008196c979d478af9a2bcbc0c1a0275c54"} Nov 24 02:15:03 crc kubenswrapper[4755]: I1124 02:15:03.155291 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-25x59/crc-debug-mlp8v" event={"ID":"1a6cfa24-a493-4143-bee3-f46ed7b4fd1f","Type":"ContainerStarted","Data":"aa64542bcc827c47c6bc503530e7d49d1b2577cba7044490cd968cac3dd833c8"} Nov 24 02:15:03 crc kubenswrapper[4755]: I1124 02:15:03.181435 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-25x59/crc-debug-mlp8v" podStartSLOduration=1.18141234 podStartE2EDuration="1.18141234s" podCreationTimestamp="2025-11-24 02:15:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 02:15:03.179410723 +0000 UTC m=+3727.865476224" watchObservedRunningTime="2025-11-24 02:15:03.18141234 +0000 UTC m=+3727.867477841" Nov 24 02:15:03 crc kubenswrapper[4755]: I1124 02:15:03.421819 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29399175-tn7m7" Nov 24 02:15:03 crc kubenswrapper[4755]: I1124 02:15:03.573177 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5f3db3f8-8b7e-4b5b-97b4-9bfdbb4fa28e-secret-volume\") pod \"5f3db3f8-8b7e-4b5b-97b4-9bfdbb4fa28e\" (UID: \"5f3db3f8-8b7e-4b5b-97b4-9bfdbb4fa28e\") " Nov 24 02:15:03 crc kubenswrapper[4755]: I1124 02:15:03.573352 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wt42w\" (UniqueName: \"kubernetes.io/projected/5f3db3f8-8b7e-4b5b-97b4-9bfdbb4fa28e-kube-api-access-wt42w\") pod \"5f3db3f8-8b7e-4b5b-97b4-9bfdbb4fa28e\" (UID: \"5f3db3f8-8b7e-4b5b-97b4-9bfdbb4fa28e\") " Nov 24 02:15:03 crc kubenswrapper[4755]: I1124 02:15:03.573440 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5f3db3f8-8b7e-4b5b-97b4-9bfdbb4fa28e-config-volume\") pod \"5f3db3f8-8b7e-4b5b-97b4-9bfdbb4fa28e\" (UID: \"5f3db3f8-8b7e-4b5b-97b4-9bfdbb4fa28e\") " Nov 24 02:15:03 crc kubenswrapper[4755]: I1124 02:15:03.574186 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5f3db3f8-8b7e-4b5b-97b4-9bfdbb4fa28e-config-volume" (OuterVolumeSpecName: "config-volume") pod "5f3db3f8-8b7e-4b5b-97b4-9bfdbb4fa28e" (UID: "5f3db3f8-8b7e-4b5b-97b4-9bfdbb4fa28e"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 02:15:03 crc kubenswrapper[4755]: I1124 02:15:03.583477 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f3db3f8-8b7e-4b5b-97b4-9bfdbb4fa28e-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "5f3db3f8-8b7e-4b5b-97b4-9bfdbb4fa28e" (UID: "5f3db3f8-8b7e-4b5b-97b4-9bfdbb4fa28e"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 02:15:03 crc kubenswrapper[4755]: I1124 02:15:03.585223 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f3db3f8-8b7e-4b5b-97b4-9bfdbb4fa28e-kube-api-access-wt42w" (OuterVolumeSpecName: "kube-api-access-wt42w") pod "5f3db3f8-8b7e-4b5b-97b4-9bfdbb4fa28e" (UID: "5f3db3f8-8b7e-4b5b-97b4-9bfdbb4fa28e"). InnerVolumeSpecName "kube-api-access-wt42w". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 02:15:03 crc kubenswrapper[4755]: I1124 02:15:03.676056 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wt42w\" (UniqueName: \"kubernetes.io/projected/5f3db3f8-8b7e-4b5b-97b4-9bfdbb4fa28e-kube-api-access-wt42w\") on node \"crc\" DevicePath \"\"" Nov 24 02:15:03 crc kubenswrapper[4755]: I1124 02:15:03.676102 4755 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5f3db3f8-8b7e-4b5b-97b4-9bfdbb4fa28e-config-volume\") on node \"crc\" DevicePath \"\"" Nov 24 02:15:03 crc kubenswrapper[4755]: I1124 02:15:03.676116 4755 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5f3db3f8-8b7e-4b5b-97b4-9bfdbb4fa28e-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 24 02:15:04 crc kubenswrapper[4755]: I1124 02:15:04.167314 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29399175-tn7m7" event={"ID":"5f3db3f8-8b7e-4b5b-97b4-9bfdbb4fa28e","Type":"ContainerDied","Data":"afedc12486691e68d7c3eb5e38e83a6fb7dbf6d94955e98be91d6baf02906a2a"} Nov 24 02:15:04 crc kubenswrapper[4755]: I1124 02:15:04.167706 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="afedc12486691e68d7c3eb5e38e83a6fb7dbf6d94955e98be91d6baf02906a2a" Nov 24 02:15:04 crc kubenswrapper[4755]: I1124 02:15:04.167432 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29399175-tn7m7" Nov 24 02:15:04 crc kubenswrapper[4755]: I1124 02:15:04.495297 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29399130-98xxs"] Nov 24 02:15:04 crc kubenswrapper[4755]: I1124 02:15:04.503716 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29399130-98xxs"] Nov 24 02:15:04 crc kubenswrapper[4755]: I1124 02:15:04.892550 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-vbk6v"] Nov 24 02:15:04 crc kubenswrapper[4755]: E1124 02:15:04.893417 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f3db3f8-8b7e-4b5b-97b4-9bfdbb4fa28e" containerName="collect-profiles" Nov 24 02:15:04 crc kubenswrapper[4755]: I1124 02:15:04.893443 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f3db3f8-8b7e-4b5b-97b4-9bfdbb4fa28e" containerName="collect-profiles" Nov 24 02:15:04 crc kubenswrapper[4755]: I1124 02:15:04.893672 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f3db3f8-8b7e-4b5b-97b4-9bfdbb4fa28e" containerName="collect-profiles" Nov 24 02:15:04 crc kubenswrapper[4755]: I1124 02:15:04.895123 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vbk6v" Nov 24 02:15:04 crc kubenswrapper[4755]: I1124 02:15:04.911893 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vbk6v"] Nov 24 02:15:05 crc kubenswrapper[4755]: I1124 02:15:05.000478 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5df45905-a3d4-409c-9e31-a3060522a66f-utilities\") pod \"community-operators-vbk6v\" (UID: \"5df45905-a3d4-409c-9e31-a3060522a66f\") " pod="openshift-marketplace/community-operators-vbk6v" Nov 24 02:15:05 crc kubenswrapper[4755]: I1124 02:15:05.000568 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9bv7d\" (UniqueName: \"kubernetes.io/projected/5df45905-a3d4-409c-9e31-a3060522a66f-kube-api-access-9bv7d\") pod \"community-operators-vbk6v\" (UID: \"5df45905-a3d4-409c-9e31-a3060522a66f\") " pod="openshift-marketplace/community-operators-vbk6v" Nov 24 02:15:05 crc kubenswrapper[4755]: I1124 02:15:05.000638 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5df45905-a3d4-409c-9e31-a3060522a66f-catalog-content\") pod \"community-operators-vbk6v\" (UID: \"5df45905-a3d4-409c-9e31-a3060522a66f\") " pod="openshift-marketplace/community-operators-vbk6v" Nov 24 02:15:05 crc kubenswrapper[4755]: I1124 02:15:05.102370 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5df45905-a3d4-409c-9e31-a3060522a66f-catalog-content\") pod \"community-operators-vbk6v\" (UID: \"5df45905-a3d4-409c-9e31-a3060522a66f\") " pod="openshift-marketplace/community-operators-vbk6v" Nov 24 02:15:05 crc kubenswrapper[4755]: I1124 02:15:05.102508 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5df45905-a3d4-409c-9e31-a3060522a66f-utilities\") pod \"community-operators-vbk6v\" 
(UID: \"5df45905-a3d4-409c-9e31-a3060522a66f\") " pod="openshift-marketplace/community-operators-vbk6v" Nov 24 02:15:05 crc kubenswrapper[4755]: I1124 02:15:05.102638 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9bv7d\" (UniqueName: \"kubernetes.io/projected/5df45905-a3d4-409c-9e31-a3060522a66f-kube-api-access-9bv7d\") pod \"community-operators-vbk6v\" (UID: \"5df45905-a3d4-409c-9e31-a3060522a66f\") " pod="openshift-marketplace/community-operators-vbk6v" Nov 24 02:15:05 crc kubenswrapper[4755]: I1124 02:15:05.103626 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5df45905-a3d4-409c-9e31-a3060522a66f-catalog-content\") pod \"community-operators-vbk6v\" (UID: \"5df45905-a3d4-409c-9e31-a3060522a66f\") " pod="openshift-marketplace/community-operators-vbk6v" Nov 24 02:15:05 crc kubenswrapper[4755]: I1124 02:15:05.104425 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5df45905-a3d4-409c-9e31-a3060522a66f-utilities\") pod \"community-operators-vbk6v\" (UID: \"5df45905-a3d4-409c-9e31-a3060522a66f\") " pod="openshift-marketplace/community-operators-vbk6v" Nov 24 02:15:05 crc kubenswrapper[4755]: I1124 02:15:05.134725 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9bv7d\" (UniqueName: \"kubernetes.io/projected/5df45905-a3d4-409c-9e31-a3060522a66f-kube-api-access-9bv7d\") pod \"community-operators-vbk6v\" (UID: \"5df45905-a3d4-409c-9e31-a3060522a66f\") " pod="openshift-marketplace/community-operators-vbk6v" Nov 24 02:15:05 crc kubenswrapper[4755]: I1124 02:15:05.222267 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-vbk6v" Nov 24 02:15:05 crc kubenswrapper[4755]: I1124 02:15:05.829172 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vbk6v"] Nov 24 02:15:06 crc kubenswrapper[4755]: I1124 02:15:06.008416 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f520ef1b-1a13-43c3-95cc-66a957b8e41f" path="/var/lib/kubelet/pods/f520ef1b-1a13-43c3-95cc-66a957b8e41f/volumes" Nov 24 02:15:06 crc kubenswrapper[4755]: I1124 02:15:06.189247 4755 generic.go:334] "Generic (PLEG): container finished" podID="5df45905-a3d4-409c-9e31-a3060522a66f" containerID="21c6530c69c1e4feada03bdde5b8f167ebb93cbb8732c8ce996192b372e3a642" exitCode=0 Nov 24 02:15:06 crc kubenswrapper[4755]: I1124 02:15:06.189308 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vbk6v" event={"ID":"5df45905-a3d4-409c-9e31-a3060522a66f","Type":"ContainerDied","Data":"21c6530c69c1e4feada03bdde5b8f167ebb93cbb8732c8ce996192b372e3a642"} Nov 24 02:15:06 crc kubenswrapper[4755]: I1124 02:15:06.189342 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vbk6v" event={"ID":"5df45905-a3d4-409c-9e31-a3060522a66f","Type":"ContainerStarted","Data":"e9abcd20e6868d3103d70635bf1170f8a9eca7a50ca36252a8e71fa3a183727e"} Nov 24 02:15:07 crc kubenswrapper[4755]: I1124 02:15:07.200890 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vbk6v" event={"ID":"5df45905-a3d4-409c-9e31-a3060522a66f","Type":"ContainerStarted","Data":"2554bc5217efa13a1b6ff8d46b5c7fc2547029f4f181586a3fc4d7a7265892f4"} Nov 24 02:15:08 crc kubenswrapper[4755]: I1124 02:15:08.214756 4755 generic.go:334] "Generic (PLEG): container finished" podID="5df45905-a3d4-409c-9e31-a3060522a66f" containerID="2554bc5217efa13a1b6ff8d46b5c7fc2547029f4f181586a3fc4d7a7265892f4" exitCode=0 Nov 24 02:15:08 crc kubenswrapper[4755]: I1124 02:15:08.215057 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vbk6v" event={"ID":"5df45905-a3d4-409c-9e31-a3060522a66f","Type":"ContainerDied","Data":"2554bc5217efa13a1b6ff8d46b5c7fc2547029f4f181586a3fc4d7a7265892f4"} Nov 24 02:15:09 crc kubenswrapper[4755]: I1124 02:15:09.227109 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vbk6v" event={"ID":"5df45905-a3d4-409c-9e31-a3060522a66f","Type":"ContainerStarted","Data":"8dacd0eeb92e8f36bb731efb7bc162c1ec1c5202e082fd83b1f1fc462c47b337"} Nov 24 02:15:09 crc kubenswrapper[4755]: I1124 02:15:09.252593 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-vbk6v" podStartSLOduration=2.824718736 podStartE2EDuration="5.252568044s" podCreationTimestamp="2025-11-24 02:15:04 +0000 UTC" firstStartedPulling="2025-11-24 02:15:06.211701978 +0000 UTC m=+3730.897767479" lastFinishedPulling="2025-11-24 02:15:08.639551286 +0000 UTC m=+3733.325616787" observedRunningTime="2025-11-24 02:15:09.246698119 +0000 UTC m=+3733.932763640" watchObservedRunningTime="2025-11-24 02:15:09.252568044 +0000 UTC m=+3733.938633545" Nov 24 02:15:09 crc kubenswrapper[4755]: I1124 02:15:09.997615 4755 scope.go:117] "RemoveContainer" containerID="fff609c366ec815c32e40db1bbafb7f7be4ae091a2824075c9d486c88017b349" Nov 24 02:15:09 crc kubenswrapper[4755]: E1124 02:15:09.998417 4755 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:15:15 crc kubenswrapper[4755]: I1124 02:15:15.222902 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-vbk6v" Nov 24 02:15:15 crc kubenswrapper[4755]: I1124 02:15:15.226204 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-vbk6v" Nov 24 02:15:15 crc kubenswrapper[4755]: I1124 02:15:15.271438 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-vbk6v" Nov 24 02:15:15 crc kubenswrapper[4755]: I1124 02:15:15.357904 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-vbk6v" Nov 24 02:15:15 crc kubenswrapper[4755]: I1124 02:15:15.514685 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vbk6v"] Nov 24 02:15:17 crc kubenswrapper[4755]: I1124 02:15:17.306303 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-vbk6v" podUID="5df45905-a3d4-409c-9e31-a3060522a66f" containerName="registry-server" containerID="cri-o://8dacd0eeb92e8f36bb731efb7bc162c1ec1c5202e082fd83b1f1fc462c47b337" gracePeriod=2 Nov 24 02:15:17 crc kubenswrapper[4755]: I1124 02:15:17.933682 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vbk6v" Nov 24 02:15:18 crc kubenswrapper[4755]: I1124 02:15:18.104027 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5df45905-a3d4-409c-9e31-a3060522a66f-utilities\") pod \"5df45905-a3d4-409c-9e31-a3060522a66f\" (UID: \"5df45905-a3d4-409c-9e31-a3060522a66f\") " Nov 24 02:15:18 crc kubenswrapper[4755]: I1124 02:15:18.104404 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5df45905-a3d4-409c-9e31-a3060522a66f-catalog-content\") pod \"5df45905-a3d4-409c-9e31-a3060522a66f\" (UID: \"5df45905-a3d4-409c-9e31-a3060522a66f\") " Nov 24 02:15:18 crc kubenswrapper[4755]: I1124 02:15:18.104783 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9bv7d\" (UniqueName: \"kubernetes.io/projected/5df45905-a3d4-409c-9e31-a3060522a66f-kube-api-access-9bv7d\") pod \"5df45905-a3d4-409c-9e31-a3060522a66f\" (UID: \"5df45905-a3d4-409c-9e31-a3060522a66f\") " Nov 24 02:15:18 crc kubenswrapper[4755]: I1124 02:15:18.105673 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5df45905-a3d4-409c-9e31-a3060522a66f-utilities" (OuterVolumeSpecName: "utilities") pod "5df45905-a3d4-409c-9e31-a3060522a66f" (UID: "5df45905-a3d4-409c-9e31-a3060522a66f"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 02:15:18 crc kubenswrapper[4755]: I1124 02:15:18.117262 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5df45905-a3d4-409c-9e31-a3060522a66f-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 02:15:18 crc kubenswrapper[4755]: I1124 02:15:18.133977 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5df45905-a3d4-409c-9e31-a3060522a66f-kube-api-access-9bv7d" (OuterVolumeSpecName: "kube-api-access-9bv7d") pod "5df45905-a3d4-409c-9e31-a3060522a66f" (UID: "5df45905-a3d4-409c-9e31-a3060522a66f"). InnerVolumeSpecName "kube-api-access-9bv7d". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 02:15:18 crc kubenswrapper[4755]: I1124 02:15:18.174066 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5df45905-a3d4-409c-9e31-a3060522a66f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5df45905-a3d4-409c-9e31-a3060522a66f" (UID: "5df45905-a3d4-409c-9e31-a3060522a66f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 02:15:18 crc kubenswrapper[4755]: I1124 02:15:18.219262 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5df45905-a3d4-409c-9e31-a3060522a66f-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 02:15:18 crc kubenswrapper[4755]: I1124 02:15:18.219296 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9bv7d\" (UniqueName: \"kubernetes.io/projected/5df45905-a3d4-409c-9e31-a3060522a66f-kube-api-access-9bv7d\") on node \"crc\" DevicePath \"\"" Nov 24 02:15:18 crc kubenswrapper[4755]: I1124 02:15:18.317946 4755 generic.go:334] "Generic (PLEG): container finished" podID="5df45905-a3d4-409c-9e31-a3060522a66f" containerID="8dacd0eeb92e8f36bb731efb7bc162c1ec1c5202e082fd83b1f1fc462c47b337" exitCode=0 Nov 24 02:15:18 crc kubenswrapper[4755]: I1124 02:15:18.317988 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vbk6v" event={"ID":"5df45905-a3d4-409c-9e31-a3060522a66f","Type":"ContainerDied","Data":"8dacd0eeb92e8f36bb731efb7bc162c1ec1c5202e082fd83b1f1fc462c47b337"} Nov 24 02:15:18 crc kubenswrapper[4755]: I1124 02:15:18.318012 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-vbk6v" Nov 24 02:15:18 crc kubenswrapper[4755]: I1124 02:15:18.318026 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vbk6v" event={"ID":"5df45905-a3d4-409c-9e31-a3060522a66f","Type":"ContainerDied","Data":"e9abcd20e6868d3103d70635bf1170f8a9eca7a50ca36252a8e71fa3a183727e"} Nov 24 02:15:18 crc kubenswrapper[4755]: I1124 02:15:18.318044 4755 scope.go:117] "RemoveContainer" containerID="8dacd0eeb92e8f36bb731efb7bc162c1ec1c5202e082fd83b1f1fc462c47b337" Nov 24 02:15:18 crc kubenswrapper[4755]: I1124 02:15:18.340460 4755 scope.go:117] "RemoveContainer" containerID="2554bc5217efa13a1b6ff8d46b5c7fc2547029f4f181586a3fc4d7a7265892f4" Nov 24 02:15:18 crc kubenswrapper[4755]: I1124 02:15:18.355547 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vbk6v"] Nov 24 02:15:18 crc kubenswrapper[4755]: I1124 02:15:18.363249 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-vbk6v"] Nov 24 02:15:18 crc kubenswrapper[4755]: I1124 02:15:18.379239 4755 scope.go:117] "RemoveContainer" containerID="21c6530c69c1e4feada03bdde5b8f167ebb93cbb8732c8ce996192b372e3a642" Nov 24 02:15:18 crc kubenswrapper[4755]: I1124 02:15:18.416711 4755 scope.go:117] "RemoveContainer" containerID="8dacd0eeb92e8f36bb731efb7bc162c1ec1c5202e082fd83b1f1fc462c47b337" Nov 24 02:15:18 crc kubenswrapper[4755]: E1124 02:15:18.418666 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8dacd0eeb92e8f36bb731efb7bc162c1ec1c5202e082fd83b1f1fc462c47b337\": container with ID starting with 8dacd0eeb92e8f36bb731efb7bc162c1ec1c5202e082fd83b1f1fc462c47b337 not found: ID does not exist" containerID="8dacd0eeb92e8f36bb731efb7bc162c1ec1c5202e082fd83b1f1fc462c47b337" Nov 24 02:15:18 crc kubenswrapper[4755]: I1124 02:15:18.418719 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8dacd0eeb92e8f36bb731efb7bc162c1ec1c5202e082fd83b1f1fc462c47b337"} err="failed to get container status \"8dacd0eeb92e8f36bb731efb7bc162c1ec1c5202e082fd83b1f1fc462c47b337\": rpc error: code = NotFound desc = could not find container \"8dacd0eeb92e8f36bb731efb7bc162c1ec1c5202e082fd83b1f1fc462c47b337\": container with ID starting with 8dacd0eeb92e8f36bb731efb7bc162c1ec1c5202e082fd83b1f1fc462c47b337 not found: ID does not exist" Nov 24 02:15:18 crc kubenswrapper[4755]: I1124 02:15:18.418752 4755 scope.go:117] "RemoveContainer" containerID="2554bc5217efa13a1b6ff8d46b5c7fc2547029f4f181586a3fc4d7a7265892f4" Nov 24 02:15:18 crc kubenswrapper[4755]: E1124 02:15:18.419072 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2554bc5217efa13a1b6ff8d46b5c7fc2547029f4f181586a3fc4d7a7265892f4\": container with ID starting with 2554bc5217efa13a1b6ff8d46b5c7fc2547029f4f181586a3fc4d7a7265892f4 not found: ID does not exist" containerID="2554bc5217efa13a1b6ff8d46b5c7fc2547029f4f181586a3fc4d7a7265892f4" Nov 24 02:15:18 crc kubenswrapper[4755]: I1124 02:15:18.419117 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2554bc5217efa13a1b6ff8d46b5c7fc2547029f4f181586a3fc4d7a7265892f4"} err="failed to get container status \"2554bc5217efa13a1b6ff8d46b5c7fc2547029f4f181586a3fc4d7a7265892f4\": rpc error: code = NotFound desc = could not find 
container \"2554bc5217efa13a1b6ff8d46b5c7fc2547029f4f181586a3fc4d7a7265892f4\": container with ID starting with 2554bc5217efa13a1b6ff8d46b5c7fc2547029f4f181586a3fc4d7a7265892f4 not found: ID does not exist" Nov 24 02:15:18 crc kubenswrapper[4755]: I1124 02:15:18.419145 4755 scope.go:117] "RemoveContainer" containerID="21c6530c69c1e4feada03bdde5b8f167ebb93cbb8732c8ce996192b372e3a642" Nov 24 02:15:18 crc kubenswrapper[4755]: E1124 02:15:18.419450 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"21c6530c69c1e4feada03bdde5b8f167ebb93cbb8732c8ce996192b372e3a642\": container with ID starting with 21c6530c69c1e4feada03bdde5b8f167ebb93cbb8732c8ce996192b372e3a642 not found: ID does not exist" containerID="21c6530c69c1e4feada03bdde5b8f167ebb93cbb8732c8ce996192b372e3a642" Nov 24 02:15:18 crc kubenswrapper[4755]: I1124 02:15:18.419484 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"21c6530c69c1e4feada03bdde5b8f167ebb93cbb8732c8ce996192b372e3a642"} err="failed to get container status \"21c6530c69c1e4feada03bdde5b8f167ebb93cbb8732c8ce996192b372e3a642\": rpc error: code = NotFound desc = could not find container \"21c6530c69c1e4feada03bdde5b8f167ebb93cbb8732c8ce996192b372e3a642\": container with ID starting with 21c6530c69c1e4feada03bdde5b8f167ebb93cbb8732c8ce996192b372e3a642 not found: ID does not exist" Nov 24 02:15:20 crc kubenswrapper[4755]: I1124 02:15:20.008967 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5df45905-a3d4-409c-9e31-a3060522a66f" path="/var/lib/kubelet/pods/5df45905-a3d4-409c-9e31-a3060522a66f/volumes" Nov 24 02:15:22 crc kubenswrapper[4755]: I1124 02:15:22.996838 4755 scope.go:117] "RemoveContainer" containerID="fff609c366ec815c32e40db1bbafb7f7be4ae091a2824075c9d486c88017b349" Nov 24 02:15:22 crc kubenswrapper[4755]: E1124 02:15:22.998677 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:15:37 crc kubenswrapper[4755]: I1124 02:15:37.996239 4755 scope.go:117] "RemoveContainer" containerID="fff609c366ec815c32e40db1bbafb7f7be4ae091a2824075c9d486c88017b349" Nov 24 02:15:37 crc kubenswrapper[4755]: E1124 02:15:37.996994 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:15:39 crc kubenswrapper[4755]: I1124 02:15:39.493092 4755 generic.go:334] "Generic (PLEG): container finished" podID="1a6cfa24-a493-4143-bee3-f46ed7b4fd1f" containerID="26af8c08ca463596685a1bc59fa2fa008196c979d478af9a2bcbc0c1a0275c54" exitCode=0 Nov 24 02:15:39 crc kubenswrapper[4755]: I1124 02:15:39.493161 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-25x59/crc-debug-mlp8v" 
event={"ID":"1a6cfa24-a493-4143-bee3-f46ed7b4fd1f","Type":"ContainerDied","Data":"26af8c08ca463596685a1bc59fa2fa008196c979d478af9a2bcbc0c1a0275c54"} Nov 24 02:15:40 crc kubenswrapper[4755]: I1124 02:15:40.597507 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-25x59/crc-debug-mlp8v" Nov 24 02:15:40 crc kubenswrapper[4755]: I1124 02:15:40.632337 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-25x59/crc-debug-mlp8v"] Nov 24 02:15:40 crc kubenswrapper[4755]: I1124 02:15:40.641768 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-25x59/crc-debug-mlp8v"] Nov 24 02:15:40 crc kubenswrapper[4755]: I1124 02:15:40.745151 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-74876\" (UniqueName: \"kubernetes.io/projected/1a6cfa24-a493-4143-bee3-f46ed7b4fd1f-kube-api-access-74876\") pod \"1a6cfa24-a493-4143-bee3-f46ed7b4fd1f\" (UID: \"1a6cfa24-a493-4143-bee3-f46ed7b4fd1f\") " Nov 24 02:15:40 crc kubenswrapper[4755]: I1124 02:15:40.745321 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1a6cfa24-a493-4143-bee3-f46ed7b4fd1f-host\") pod \"1a6cfa24-a493-4143-bee3-f46ed7b4fd1f\" (UID: \"1a6cfa24-a493-4143-bee3-f46ed7b4fd1f\") " Nov 24 02:15:40 crc kubenswrapper[4755]: I1124 02:15:40.745373 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1a6cfa24-a493-4143-bee3-f46ed7b4fd1f-host" (OuterVolumeSpecName: "host") pod "1a6cfa24-a493-4143-bee3-f46ed7b4fd1f" (UID: "1a6cfa24-a493-4143-bee3-f46ed7b4fd1f"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 02:15:40 crc kubenswrapper[4755]: I1124 02:15:40.745951 4755 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1a6cfa24-a493-4143-bee3-f46ed7b4fd1f-host\") on node \"crc\" DevicePath \"\"" Nov 24 02:15:40 crc kubenswrapper[4755]: I1124 02:15:40.750991 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a6cfa24-a493-4143-bee3-f46ed7b4fd1f-kube-api-access-74876" (OuterVolumeSpecName: "kube-api-access-74876") pod "1a6cfa24-a493-4143-bee3-f46ed7b4fd1f" (UID: "1a6cfa24-a493-4143-bee3-f46ed7b4fd1f"). InnerVolumeSpecName "kube-api-access-74876". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 02:15:40 crc kubenswrapper[4755]: I1124 02:15:40.847771 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-74876\" (UniqueName: \"kubernetes.io/projected/1a6cfa24-a493-4143-bee3-f46ed7b4fd1f-kube-api-access-74876\") on node \"crc\" DevicePath \"\"" Nov 24 02:15:41 crc kubenswrapper[4755]: I1124 02:15:41.514443 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aa64542bcc827c47c6bc503530e7d49d1b2577cba7044490cd968cac3dd833c8" Nov 24 02:15:41 crc kubenswrapper[4755]: I1124 02:15:41.514489 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-25x59/crc-debug-mlp8v" Nov 24 02:15:41 crc kubenswrapper[4755]: I1124 02:15:41.802625 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-25x59/crc-debug-6t2w2"] Nov 24 02:15:41 crc kubenswrapper[4755]: E1124 02:15:41.802984 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a6cfa24-a493-4143-bee3-f46ed7b4fd1f" containerName="container-00" Nov 24 02:15:41 crc kubenswrapper[4755]: I1124 02:15:41.802995 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a6cfa24-a493-4143-bee3-f46ed7b4fd1f" containerName="container-00" Nov 24 02:15:41 crc kubenswrapper[4755]: E1124 02:15:41.803020 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5df45905-a3d4-409c-9e31-a3060522a66f" containerName="registry-server" Nov 24 02:15:41 crc kubenswrapper[4755]: I1124 02:15:41.803030 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="5df45905-a3d4-409c-9e31-a3060522a66f" containerName="registry-server" Nov 24 02:15:41 crc kubenswrapper[4755]: E1124 02:15:41.803054 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5df45905-a3d4-409c-9e31-a3060522a66f" containerName="extract-content" Nov 24 02:15:41 crc kubenswrapper[4755]: I1124 02:15:41.803062 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="5df45905-a3d4-409c-9e31-a3060522a66f" containerName="extract-content" Nov 24 02:15:41 crc kubenswrapper[4755]: E1124 02:15:41.803077 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5df45905-a3d4-409c-9e31-a3060522a66f" containerName="extract-utilities" Nov 24 02:15:41 crc kubenswrapper[4755]: I1124 02:15:41.803084 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="5df45905-a3d4-409c-9e31-a3060522a66f" containerName="extract-utilities" Nov 24 02:15:41 crc kubenswrapper[4755]: I1124 02:15:41.803257 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a6cfa24-a493-4143-bee3-f46ed7b4fd1f" containerName="container-00" Nov 24 02:15:41 crc kubenswrapper[4755]: I1124 02:15:41.803270 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="5df45905-a3d4-409c-9e31-a3060522a66f" containerName="registry-server" Nov 24 02:15:41 crc kubenswrapper[4755]: I1124 02:15:41.804020 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-25x59/crc-debug-6t2w2" Nov 24 02:15:41 crc kubenswrapper[4755]: I1124 02:15:41.965379 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/974eaeaf-3172-4797-b719-45d324388723-host\") pod \"crc-debug-6t2w2\" (UID: \"974eaeaf-3172-4797-b719-45d324388723\") " pod="openshift-must-gather-25x59/crc-debug-6t2w2" Nov 24 02:15:41 crc kubenswrapper[4755]: I1124 02:15:41.965515 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gn6rh\" (UniqueName: \"kubernetes.io/projected/974eaeaf-3172-4797-b719-45d324388723-kube-api-access-gn6rh\") pod \"crc-debug-6t2w2\" (UID: \"974eaeaf-3172-4797-b719-45d324388723\") " pod="openshift-must-gather-25x59/crc-debug-6t2w2" Nov 24 02:15:42 crc kubenswrapper[4755]: I1124 02:15:42.008538 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a6cfa24-a493-4143-bee3-f46ed7b4fd1f" path="/var/lib/kubelet/pods/1a6cfa24-a493-4143-bee3-f46ed7b4fd1f/volumes" Nov 24 02:15:42 crc kubenswrapper[4755]: I1124 02:15:42.067756 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gn6rh\" (UniqueName: \"kubernetes.io/projected/974eaeaf-3172-4797-b719-45d324388723-kube-api-access-gn6rh\") pod \"crc-debug-6t2w2\" (UID: \"974eaeaf-3172-4797-b719-45d324388723\") " pod="openshift-must-gather-25x59/crc-debug-6t2w2" Nov 24 02:15:42 crc kubenswrapper[4755]: I1124 02:15:42.067887 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/974eaeaf-3172-4797-b719-45d324388723-host\") pod \"crc-debug-6t2w2\" (UID: \"974eaeaf-3172-4797-b719-45d324388723\") " pod="openshift-must-gather-25x59/crc-debug-6t2w2" Nov 24 02:15:42 crc kubenswrapper[4755]: I1124 02:15:42.068021 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/974eaeaf-3172-4797-b719-45d324388723-host\") pod \"crc-debug-6t2w2\" (UID: \"974eaeaf-3172-4797-b719-45d324388723\") " pod="openshift-must-gather-25x59/crc-debug-6t2w2" Nov 24 02:15:42 crc kubenswrapper[4755]: I1124 02:15:42.096644 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gn6rh\" (UniqueName: \"kubernetes.io/projected/974eaeaf-3172-4797-b719-45d324388723-kube-api-access-gn6rh\") pod \"crc-debug-6t2w2\" (UID: \"974eaeaf-3172-4797-b719-45d324388723\") " pod="openshift-must-gather-25x59/crc-debug-6t2w2" Nov 24 02:15:42 crc kubenswrapper[4755]: I1124 02:15:42.120765 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-25x59/crc-debug-6t2w2" Nov 24 02:15:42 crc kubenswrapper[4755]: I1124 02:15:42.529477 4755 generic.go:334] "Generic (PLEG): container finished" podID="974eaeaf-3172-4797-b719-45d324388723" containerID="1fc052bc6d91b5d366af2ff066a2db4979e1cc3ed666da6dc9893bff8e9524de" exitCode=0 Nov 24 02:15:42 crc kubenswrapper[4755]: I1124 02:15:42.529682 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-25x59/crc-debug-6t2w2" event={"ID":"974eaeaf-3172-4797-b719-45d324388723","Type":"ContainerDied","Data":"1fc052bc6d91b5d366af2ff066a2db4979e1cc3ed666da6dc9893bff8e9524de"} Nov 24 02:15:42 crc kubenswrapper[4755]: I1124 02:15:42.529856 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-25x59/crc-debug-6t2w2" event={"ID":"974eaeaf-3172-4797-b719-45d324388723","Type":"ContainerStarted","Data":"22af618fcf7ecf5d786adfc360cba72f859698c2588347395eca557d771484ba"} Nov 24 02:15:43 crc kubenswrapper[4755]: I1124 02:15:43.094322 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-25x59/crc-debug-6t2w2"] Nov 24 02:15:43 crc kubenswrapper[4755]: I1124 02:15:43.120573 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-25x59/crc-debug-6t2w2"] Nov 24 02:15:43 crc kubenswrapper[4755]: I1124 02:15:43.637259 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-25x59/crc-debug-6t2w2" Nov 24 02:15:43 crc kubenswrapper[4755]: I1124 02:15:43.798960 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/974eaeaf-3172-4797-b719-45d324388723-host\") pod \"974eaeaf-3172-4797-b719-45d324388723\" (UID: \"974eaeaf-3172-4797-b719-45d324388723\") " Nov 24 02:15:43 crc kubenswrapper[4755]: I1124 02:15:43.799054 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gn6rh\" (UniqueName: \"kubernetes.io/projected/974eaeaf-3172-4797-b719-45d324388723-kube-api-access-gn6rh\") pod \"974eaeaf-3172-4797-b719-45d324388723\" (UID: \"974eaeaf-3172-4797-b719-45d324388723\") " Nov 24 02:15:43 crc kubenswrapper[4755]: I1124 02:15:43.799073 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/974eaeaf-3172-4797-b719-45d324388723-host" (OuterVolumeSpecName: "host") pod "974eaeaf-3172-4797-b719-45d324388723" (UID: "974eaeaf-3172-4797-b719-45d324388723"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 02:15:43 crc kubenswrapper[4755]: I1124 02:15:43.799777 4755 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/974eaeaf-3172-4797-b719-45d324388723-host\") on node \"crc\" DevicePath \"\"" Nov 24 02:15:43 crc kubenswrapper[4755]: I1124 02:15:43.807429 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/974eaeaf-3172-4797-b719-45d324388723-kube-api-access-gn6rh" (OuterVolumeSpecName: "kube-api-access-gn6rh") pod "974eaeaf-3172-4797-b719-45d324388723" (UID: "974eaeaf-3172-4797-b719-45d324388723"). InnerVolumeSpecName "kube-api-access-gn6rh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 02:15:43 crc kubenswrapper[4755]: I1124 02:15:43.901500 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gn6rh\" (UniqueName: \"kubernetes.io/projected/974eaeaf-3172-4797-b719-45d324388723-kube-api-access-gn6rh\") on node \"crc\" DevicePath \"\"" Nov 24 02:15:44 crc kubenswrapper[4755]: I1124 02:15:44.016366 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="974eaeaf-3172-4797-b719-45d324388723" path="/var/lib/kubelet/pods/974eaeaf-3172-4797-b719-45d324388723/volumes" Nov 24 02:15:44 crc kubenswrapper[4755]: I1124 02:15:44.318069 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-25x59/crc-debug-779bt"] Nov 24 02:15:44 crc kubenswrapper[4755]: E1124 02:15:44.318446 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="974eaeaf-3172-4797-b719-45d324388723" containerName="container-00" Nov 24 02:15:44 crc kubenswrapper[4755]: I1124 02:15:44.318460 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="974eaeaf-3172-4797-b719-45d324388723" containerName="container-00" Nov 24 02:15:44 crc kubenswrapper[4755]: I1124 02:15:44.318676 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="974eaeaf-3172-4797-b719-45d324388723" containerName="container-00" Nov 24 02:15:44 crc kubenswrapper[4755]: I1124 02:15:44.319236 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-25x59/crc-debug-779bt" Nov 24 02:15:44 crc kubenswrapper[4755]: I1124 02:15:44.410045 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a718486b-ffcf-4759-9352-e24f56e98895-host\") pod \"crc-debug-779bt\" (UID: \"a718486b-ffcf-4759-9352-e24f56e98895\") " pod="openshift-must-gather-25x59/crc-debug-779bt" Nov 24 02:15:44 crc kubenswrapper[4755]: I1124 02:15:44.410136 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tntg6\" (UniqueName: \"kubernetes.io/projected/a718486b-ffcf-4759-9352-e24f56e98895-kube-api-access-tntg6\") pod \"crc-debug-779bt\" (UID: \"a718486b-ffcf-4759-9352-e24f56e98895\") " pod="openshift-must-gather-25x59/crc-debug-779bt" Nov 24 02:15:44 crc kubenswrapper[4755]: I1124 02:15:44.512287 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a718486b-ffcf-4759-9352-e24f56e98895-host\") pod \"crc-debug-779bt\" (UID: \"a718486b-ffcf-4759-9352-e24f56e98895\") " pod="openshift-must-gather-25x59/crc-debug-779bt" Nov 24 02:15:44 crc kubenswrapper[4755]: I1124 02:15:44.512371 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tntg6\" (UniqueName: \"kubernetes.io/projected/a718486b-ffcf-4759-9352-e24f56e98895-kube-api-access-tntg6\") pod \"crc-debug-779bt\" (UID: \"a718486b-ffcf-4759-9352-e24f56e98895\") " pod="openshift-must-gather-25x59/crc-debug-779bt" Nov 24 02:15:44 crc kubenswrapper[4755]: I1124 02:15:44.512647 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a718486b-ffcf-4759-9352-e24f56e98895-host\") pod \"crc-debug-779bt\" (UID: \"a718486b-ffcf-4759-9352-e24f56e98895\") " pod="openshift-must-gather-25x59/crc-debug-779bt" Nov 24 02:15:44 crc kubenswrapper[4755]: I1124 02:15:44.536635 4755 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-tntg6\" (UniqueName: \"kubernetes.io/projected/a718486b-ffcf-4759-9352-e24f56e98895-kube-api-access-tntg6\") pod \"crc-debug-779bt\" (UID: \"a718486b-ffcf-4759-9352-e24f56e98895\") " pod="openshift-must-gather-25x59/crc-debug-779bt" Nov 24 02:15:44 crc kubenswrapper[4755]: I1124 02:15:44.548998 4755 scope.go:117] "RemoveContainer" containerID="1fc052bc6d91b5d366af2ff066a2db4979e1cc3ed666da6dc9893bff8e9524de" Nov 24 02:15:44 crc kubenswrapper[4755]: I1124 02:15:44.550323 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-25x59/crc-debug-6t2w2" Nov 24 02:15:44 crc kubenswrapper[4755]: I1124 02:15:44.643220 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-25x59/crc-debug-779bt" Nov 24 02:15:44 crc kubenswrapper[4755]: W1124 02:15:44.667708 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda718486b_ffcf_4759_9352_e24f56e98895.slice/crio-03c66d16f566d786115ab34d3d0073f554e67b71c717bede6ba6139b1f699dc5 WatchSource:0}: Error finding container 03c66d16f566d786115ab34d3d0073f554e67b71c717bede6ba6139b1f699dc5: Status 404 returned error can't find the container with id 03c66d16f566d786115ab34d3d0073f554e67b71c717bede6ba6139b1f699dc5 Nov 24 02:15:45 crc kubenswrapper[4755]: I1124 02:15:45.563236 4755 generic.go:334] "Generic (PLEG): container finished" podID="a718486b-ffcf-4759-9352-e24f56e98895" containerID="eed7eb53ab596fe82b4cca96d472015c4967ac8523afa87efca2d91ef15e3802" exitCode=0 Nov 24 02:15:45 crc kubenswrapper[4755]: I1124 02:15:45.563332 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-25x59/crc-debug-779bt" event={"ID":"a718486b-ffcf-4759-9352-e24f56e98895","Type":"ContainerDied","Data":"eed7eb53ab596fe82b4cca96d472015c4967ac8523afa87efca2d91ef15e3802"} Nov 24 02:15:45 crc kubenswrapper[4755]: I1124 02:15:45.563698 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-25x59/crc-debug-779bt" event={"ID":"a718486b-ffcf-4759-9352-e24f56e98895","Type":"ContainerStarted","Data":"03c66d16f566d786115ab34d3d0073f554e67b71c717bede6ba6139b1f699dc5"} Nov 24 02:15:45 crc kubenswrapper[4755]: I1124 02:15:45.610983 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-25x59/crc-debug-779bt"] Nov 24 02:15:45 crc kubenswrapper[4755]: I1124 02:15:45.620923 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-25x59/crc-debug-779bt"] Nov 24 02:15:46 crc kubenswrapper[4755]: I1124 02:15:46.691982 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-25x59/crc-debug-779bt" Nov 24 02:15:46 crc kubenswrapper[4755]: I1124 02:15:46.855356 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a718486b-ffcf-4759-9352-e24f56e98895-host\") pod \"a718486b-ffcf-4759-9352-e24f56e98895\" (UID: \"a718486b-ffcf-4759-9352-e24f56e98895\") " Nov 24 02:15:46 crc kubenswrapper[4755]: I1124 02:15:46.855488 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a718486b-ffcf-4759-9352-e24f56e98895-host" (OuterVolumeSpecName: "host") pod "a718486b-ffcf-4759-9352-e24f56e98895" (UID: "a718486b-ffcf-4759-9352-e24f56e98895"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 02:15:46 crc kubenswrapper[4755]: I1124 02:15:46.855644 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tntg6\" (UniqueName: \"kubernetes.io/projected/a718486b-ffcf-4759-9352-e24f56e98895-kube-api-access-tntg6\") pod \"a718486b-ffcf-4759-9352-e24f56e98895\" (UID: \"a718486b-ffcf-4759-9352-e24f56e98895\") " Nov 24 02:15:46 crc kubenswrapper[4755]: I1124 02:15:46.856077 4755 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a718486b-ffcf-4759-9352-e24f56e98895-host\") on node \"crc\" DevicePath \"\"" Nov 24 02:15:46 crc kubenswrapper[4755]: I1124 02:15:46.860898 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a718486b-ffcf-4759-9352-e24f56e98895-kube-api-access-tntg6" (OuterVolumeSpecName: "kube-api-access-tntg6") pod "a718486b-ffcf-4759-9352-e24f56e98895" (UID: "a718486b-ffcf-4759-9352-e24f56e98895"). InnerVolumeSpecName "kube-api-access-tntg6". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 02:15:46 crc kubenswrapper[4755]: I1124 02:15:46.958102 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tntg6\" (UniqueName: \"kubernetes.io/projected/a718486b-ffcf-4759-9352-e24f56e98895-kube-api-access-tntg6\") on node \"crc\" DevicePath \"\"" Nov 24 02:15:47 crc kubenswrapper[4755]: I1124 02:15:47.608129 4755 scope.go:117] "RemoveContainer" containerID="eed7eb53ab596fe82b4cca96d472015c4967ac8523afa87efca2d91ef15e3802" Nov 24 02:15:47 crc kubenswrapper[4755]: I1124 02:15:47.608419 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-25x59/crc-debug-779bt" Nov 24 02:15:48 crc kubenswrapper[4755]: I1124 02:15:48.007370 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a718486b-ffcf-4759-9352-e24f56e98895" path="/var/lib/kubelet/pods/a718486b-ffcf-4759-9352-e24f56e98895/volumes" Nov 24 02:15:52 crc kubenswrapper[4755]: I1124 02:15:52.997105 4755 scope.go:117] "RemoveContainer" containerID="fff609c366ec815c32e40db1bbafb7f7be4ae091a2824075c9d486c88017b349" Nov 24 02:15:52 crc kubenswrapper[4755]: E1124 02:15:52.998028 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:16:03 crc kubenswrapper[4755]: I1124 02:16:03.996930 4755 scope.go:117] "RemoveContainer" containerID="fff609c366ec815c32e40db1bbafb7f7be4ae091a2824075c9d486c88017b349" Nov 24 02:16:03 crc kubenswrapper[4755]: E1124 02:16:03.998436 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:16:05 crc kubenswrapper[4755]: I1124 02:16:05.018796 4755 scope.go:117] "RemoveContainer" 
containerID="9e7adb6d4c85d54897aa534f442bf3c8474b9fc060bc82fb29a3a99955b5af8d" Nov 24 02:16:08 crc kubenswrapper[4755]: I1124 02:16:08.241180 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-64586f69c8-7phjt_f51ca687-cad4-4e48-bb34-1fd95c8bf47d/barbican-api/0.log" Nov 24 02:16:08 crc kubenswrapper[4755]: I1124 02:16:08.364071 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-64586f69c8-7phjt_f51ca687-cad4-4e48-bb34-1fd95c8bf47d/barbican-api-log/0.log" Nov 24 02:16:08 crc kubenswrapper[4755]: I1124 02:16:08.424499 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-86fc6cb5d-5rbfc_b447c315-6a28-4a18-af48-fbcf84cd0c00/barbican-keystone-listener/0.log" Nov 24 02:16:08 crc kubenswrapper[4755]: I1124 02:16:08.461045 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-86fc6cb5d-5rbfc_b447c315-6a28-4a18-af48-fbcf84cd0c00/barbican-keystone-listener-log/0.log" Nov 24 02:16:08 crc kubenswrapper[4755]: I1124 02:16:08.612045 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-545bd7b455-5w47r_4100cddd-df77-4b8a-af0c-746bbd98c80f/barbican-worker/0.log" Nov 24 02:16:08 crc kubenswrapper[4755]: I1124 02:16:08.636339 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-545bd7b455-5w47r_4100cddd-df77-4b8a-af0c-746bbd98c80f/barbican-worker-log/0.log" Nov 24 02:16:08 crc kubenswrapper[4755]: I1124 02:16:08.833714 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-hqnnd_a1f11949-be37-4a9d-9e73-b0cbc20a6d1e/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Nov 24 02:16:08 crc kubenswrapper[4755]: I1124 02:16:08.889139 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_cbaee188-8eb8-461e-ba33-4abbb59c4ef7/ceilometer-central-agent/0.log" Nov 24 02:16:08 crc kubenswrapper[4755]: I1124 02:16:08.989964 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_cbaee188-8eb8-461e-ba33-4abbb59c4ef7/ceilometer-notification-agent/0.log" Nov 24 02:16:08 crc kubenswrapper[4755]: I1124 02:16:08.997841 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_cbaee188-8eb8-461e-ba33-4abbb59c4ef7/proxy-httpd/0.log" Nov 24 02:16:09 crc kubenswrapper[4755]: I1124 02:16:09.074377 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_cbaee188-8eb8-461e-ba33-4abbb59c4ef7/sg-core/0.log" Nov 24 02:16:09 crc kubenswrapper[4755]: I1124 02:16:09.403908 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_a7aabf3f-133b-4bdf-86e6-9fb76e89d076/cinder-api/0.log" Nov 24 02:16:09 crc kubenswrapper[4755]: I1124 02:16:09.432501 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_a7aabf3f-133b-4bdf-86e6-9fb76e89d076/cinder-api-log/0.log" Nov 24 02:16:09 crc kubenswrapper[4755]: I1124 02:16:09.603954 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_0404892f-7ed1-4990-a8b1-960e6531b017/cinder-scheduler/0.log" Nov 24 02:16:09 crc kubenswrapper[4755]: I1124 02:16:09.654055 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_0404892f-7ed1-4990-a8b1-960e6531b017/probe/0.log" Nov 24 02:16:09 crc kubenswrapper[4755]: I1124 02:16:09.746902 4755 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-czxxj_f823da2a-bba1-4b6e-9504-e03ec6a3b94f/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Nov 24 02:16:09 crc kubenswrapper[4755]: I1124 02:16:09.884679 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-xk9rb_4afec439-5744-46c0-a074-88c86ac07fbe/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Nov 24 02:16:09 crc kubenswrapper[4755]: I1124 02:16:09.987991 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-cb6ffcf87-6vs5l_ae6a5980-1b82-42c7-b86c-109e43e389cd/init/0.log" Nov 24 02:16:10 crc kubenswrapper[4755]: I1124 02:16:10.216356 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-cb6ffcf87-6vs5l_ae6a5980-1b82-42c7-b86c-109e43e389cd/init/0.log" Nov 24 02:16:10 crc kubenswrapper[4755]: I1124 02:16:10.218363 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-fc6r8_c0341d77-5182-4cb4-b4f8-4b3389c7887b/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Nov 24 02:16:10 crc kubenswrapper[4755]: I1124 02:16:10.243620 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-cb6ffcf87-6vs5l_ae6a5980-1b82-42c7-b86c-109e43e389cd/dnsmasq-dns/0.log" Nov 24 02:16:10 crc kubenswrapper[4755]: I1124 02:16:10.456340 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_d19dab7a-f075-4b26-a45f-1542a445a8a6/glance-log/0.log" Nov 24 02:16:10 crc kubenswrapper[4755]: I1124 02:16:10.467188 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_d19dab7a-f075-4b26-a45f-1542a445a8a6/glance-httpd/0.log" Nov 24 02:16:10 crc kubenswrapper[4755]: I1124 02:16:10.637501 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_3658a90c-83fd-4a8e-9d15-c1b2cac647f1/glance-log/0.log" Nov 24 02:16:10 crc kubenswrapper[4755]: I1124 02:16:10.682309 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_3658a90c-83fd-4a8e-9d15-c1b2cac647f1/glance-httpd/0.log" Nov 24 02:16:10 crc kubenswrapper[4755]: I1124 02:16:10.820371 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-75d8fb7cd4-vbxkn_5d176bdd-fe2f-4ed0-a930-2a6ae568b400/horizon/0.log" Nov 24 02:16:10 crc kubenswrapper[4755]: I1124 02:16:10.891924 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-km2xv_dd7e309d-d807-4897-b8e8-cff4ed2c5ac9/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Nov 24 02:16:11 crc kubenswrapper[4755]: I1124 02:16:11.171319 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-v9bk9_2b4a98ef-0655-4257-be72-766516d54fc4/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Nov 24 02:16:11 crc kubenswrapper[4755]: I1124 02:16:11.218961 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-75d8fb7cd4-vbxkn_5d176bdd-fe2f-4ed0-a930-2a6ae568b400/horizon-log/0.log" Nov 24 02:16:11 crc kubenswrapper[4755]: I1124 02:16:11.476977 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29399161-hl9nq_97f32270-c319-4ef7-9784-1a63c16a0164/keystone-cron/0.log" 
Nov 24 02:16:11 crc kubenswrapper[4755]: I1124 02:16:11.488945 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-68b755649b-gdjxt_56aa6d38-fae0-456e-8f3e-1dfc9e21aa0f/keystone-api/0.log" Nov 24 02:16:11 crc kubenswrapper[4755]: I1124 02:16:11.621735 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_c8452109-2acc-4f1b-848f-e1b5cb87590d/kube-state-metrics/0.log" Nov 24 02:16:11 crc kubenswrapper[4755]: I1124 02:16:11.709753 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-dwg9g_cff1906b-beb7-4b0f-b20b-c0d155437b90/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Nov 24 02:16:12 crc kubenswrapper[4755]: I1124 02:16:12.032658 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-765f9bdf9-sx8ch_05c3cedf-f9a5-453f-a879-fea1939c9f87/neutron-httpd/0.log" Nov 24 02:16:12 crc kubenswrapper[4755]: I1124 02:16:12.160696 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-765f9bdf9-sx8ch_05c3cedf-f9a5-453f-a879-fea1939c9f87/neutron-api/0.log" Nov 24 02:16:12 crc kubenswrapper[4755]: I1124 02:16:12.204220 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-q5lrn_0c0128e5-6f6e-4d49-813c-36d2959a8e3e/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Nov 24 02:16:13 crc kubenswrapper[4755]: I1124 02:16:13.048887 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_d237edf7-c0f4-4cfd-a1d2-fd6ac3f585b7/nova-api-log/0.log" Nov 24 02:16:13 crc kubenswrapper[4755]: I1124 02:16:13.073028 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_d3b1705c-2c0d-4bd6-b928-87a6a105cb4d/nova-cell0-conductor-conductor/0.log" Nov 24 02:16:13 crc kubenswrapper[4755]: I1124 02:16:13.347323 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_8974724e-99a8-4edd-8637-2767f33d3562/nova-cell1-conductor-conductor/0.log" Nov 24 02:16:13 crc kubenswrapper[4755]: I1124 02:16:13.392756 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_d237edf7-c0f4-4cfd-a1d2-fd6ac3f585b7/nova-api-api/0.log" Nov 24 02:16:13 crc kubenswrapper[4755]: I1124 02:16:13.431727 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_0fbd9862-f65c-4c62-8701-83a23ce4211f/nova-cell1-novncproxy-novncproxy/0.log" Nov 24 02:16:13 crc kubenswrapper[4755]: I1124 02:16:13.591147 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-wx7v6_3f6ff548-9e89-4d7c-8a41-d5c769a8d871/nova-edpm-deployment-openstack-edpm-ipam/0.log" Nov 24 02:16:13 crc kubenswrapper[4755]: I1124 02:16:13.763719 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_6db35847-3127-4ec0-b617-18e9c0f03f8a/nova-metadata-log/0.log" Nov 24 02:16:14 crc kubenswrapper[4755]: I1124 02:16:14.110330 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_0b8f9a57-22fa-4115-942f-e6f7343a78e4/mysql-bootstrap/0.log" Nov 24 02:16:14 crc kubenswrapper[4755]: I1124 02:16:14.237770 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_c2bb0127-edec-4f95-a79b-b35b3607c968/nova-scheduler-scheduler/0.log" Nov 24 02:16:14 crc kubenswrapper[4755]: I1124 02:16:14.292581 
4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_0b8f9a57-22fa-4115-942f-e6f7343a78e4/mysql-bootstrap/0.log" Nov 24 02:16:14 crc kubenswrapper[4755]: I1124 02:16:14.344486 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_0b8f9a57-22fa-4115-942f-e6f7343a78e4/galera/0.log" Nov 24 02:16:14 crc kubenswrapper[4755]: I1124 02:16:14.529129 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_22215216-efac-4810-90f1-4d42ccc6399c/mysql-bootstrap/0.log" Nov 24 02:16:14 crc kubenswrapper[4755]: I1124 02:16:14.736781 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_22215216-efac-4810-90f1-4d42ccc6399c/mysql-bootstrap/0.log" Nov 24 02:16:14 crc kubenswrapper[4755]: I1124 02:16:14.805621 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_22215216-efac-4810-90f1-4d42ccc6399c/galera/0.log" Nov 24 02:16:14 crc kubenswrapper[4755]: I1124 02:16:14.925750 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_1c6ba259-f60e-4b3f-b901-e42aaff73569/openstackclient/0.log" Nov 24 02:16:15 crc kubenswrapper[4755]: I1124 02:16:15.095339 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-7tzgl_06d5bee3-77f6-4f08-bfc6-6c9cf6f0bdc0/ovn-controller/0.log" Nov 24 02:16:15 crc kubenswrapper[4755]: I1124 02:16:15.280286 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_6db35847-3127-4ec0-b617-18e9c0f03f8a/nova-metadata-metadata/0.log" Nov 24 02:16:15 crc kubenswrapper[4755]: I1124 02:16:15.290208 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-8kczp_509f3153-a59d-4614-a753-8cd8df81734c/openstack-network-exporter/0.log" Nov 24 02:16:15 crc kubenswrapper[4755]: I1124 02:16:15.408639 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-khjj5_3e8faee1-2ae1-4f03-9379-d35e533f222d/ovsdb-server-init/0.log" Nov 24 02:16:15 crc kubenswrapper[4755]: I1124 02:16:15.571629 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-khjj5_3e8faee1-2ae1-4f03-9379-d35e533f222d/ovsdb-server-init/0.log" Nov 24 02:16:15 crc kubenswrapper[4755]: I1124 02:16:15.663526 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-khjj5_3e8faee1-2ae1-4f03-9379-d35e533f222d/ovs-vswitchd/0.log" Nov 24 02:16:15 crc kubenswrapper[4755]: I1124 02:16:15.666513 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-khjj5_3e8faee1-2ae1-4f03-9379-d35e533f222d/ovsdb-server/0.log" Nov 24 02:16:15 crc kubenswrapper[4755]: I1124 02:16:15.830533 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-bjt8c_0227ae83-e6f1-477d-b3b0-12cfcd8ae318/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Nov 24 02:16:15 crc kubenswrapper[4755]: I1124 02:16:15.918786 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_7c5a9efb-a4cf-4485-b3bd-972318209141/openstack-network-exporter/0.log" Nov 24 02:16:15 crc kubenswrapper[4755]: I1124 02:16:15.971549 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_7c5a9efb-a4cf-4485-b3bd-972318209141/ovn-northd/0.log" Nov 24 02:16:16 crc kubenswrapper[4755]: I1124 02:16:16.145078 4755 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_9b8b513e-db84-49e7-88e5-b023b20bd604/openstack-network-exporter/0.log" Nov 24 02:16:16 crc kubenswrapper[4755]: I1124 02:16:16.231943 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_9b8b513e-db84-49e7-88e5-b023b20bd604/ovsdbserver-nb/0.log" Nov 24 02:16:16 crc kubenswrapper[4755]: I1124 02:16:16.496242 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_1fdb8eaf-1302-4fff-a38f-673a89890e64/openstack-network-exporter/0.log" Nov 24 02:16:16 crc kubenswrapper[4755]: I1124 02:16:16.544198 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_1fdb8eaf-1302-4fff-a38f-673a89890e64/ovsdbserver-sb/0.log" Nov 24 02:16:16 crc kubenswrapper[4755]: I1124 02:16:16.672644 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-8674657456-64797_521180f5-3721-4b4d-8359-e7b69268a36a/placement-api/0.log" Nov 24 02:16:16 crc kubenswrapper[4755]: I1124 02:16:16.819756 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-8674657456-64797_521180f5-3721-4b4d-8359-e7b69268a36a/placement-log/0.log" Nov 24 02:16:16 crc kubenswrapper[4755]: I1124 02:16:16.879323 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_ba8cbfc3-74a5-4ea6-bd18-8fcab5462623/setup-container/0.log" Nov 24 02:16:17 crc kubenswrapper[4755]: I1124 02:16:17.110579 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_ba8cbfc3-74a5-4ea6-bd18-8fcab5462623/rabbitmq/0.log" Nov 24 02:16:17 crc kubenswrapper[4755]: I1124 02:16:17.125203 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_ba8cbfc3-74a5-4ea6-bd18-8fcab5462623/setup-container/0.log" Nov 24 02:16:17 crc kubenswrapper[4755]: I1124 02:16:17.164503 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_47ce72c9-6a27-44ff-80ed-d844fa0f1d2e/setup-container/0.log" Nov 24 02:16:17 crc kubenswrapper[4755]: I1124 02:16:17.426686 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_47ce72c9-6a27-44ff-80ed-d844fa0f1d2e/rabbitmq/0.log" Nov 24 02:16:17 crc kubenswrapper[4755]: I1124 02:16:17.427657 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_47ce72c9-6a27-44ff-80ed-d844fa0f1d2e/setup-container/0.log" Nov 24 02:16:17 crc kubenswrapper[4755]: I1124 02:16:17.518228 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-r6vw9_b2e96444-0ef2-436c-9641-b980fd1961d6/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Nov 24 02:16:17 crc kubenswrapper[4755]: I1124 02:16:17.618714 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-m85m8_ff52144e-9604-44f1-9af6-65f8c9928560/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Nov 24 02:16:17 crc kubenswrapper[4755]: I1124 02:16:17.760710 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-5hnsd_96e0eeaf-102b-47ad-8f60-02115894de6e/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Nov 24 02:16:17 crc kubenswrapper[4755]: I1124 02:16:17.975525 4755 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-bvm62_bca53d6d-913f-408e-a979-2515d6ee4c8e/ssh-known-hosts-edpm-deployment/0.log" Nov 24 02:16:17 crc kubenswrapper[4755]: I1124 02:16:17.999046 4755 scope.go:117] "RemoveContainer" containerID="fff609c366ec815c32e40db1bbafb7f7be4ae091a2824075c9d486c88017b349" Nov 24 02:16:17 crc kubenswrapper[4755]: E1124 02:16:17.999386 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:16:18 crc kubenswrapper[4755]: I1124 02:16:18.026057 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-fnjgp_6f8286f1-2efd-487d-9feb-fe2eb1fa0112/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Nov 24 02:16:18 crc kubenswrapper[4755]: I1124 02:16:18.273027 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-55d8c9dc95-pj9b5_5e2272ce-0bb7-4cc8-a11b-be4947646efd/proxy-server/0.log" Nov 24 02:16:18 crc kubenswrapper[4755]: I1124 02:16:18.299829 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-lpg42_a67b2b8c-3846-4f9d-a5d0-9279028f63e5/swift-ring-rebalance/0.log" Nov 24 02:16:18 crc kubenswrapper[4755]: I1124 02:16:18.317678 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-55d8c9dc95-pj9b5_5e2272ce-0bb7-4cc8-a11b-be4947646efd/proxy-httpd/0.log" Nov 24 02:16:18 crc kubenswrapper[4755]: I1124 02:16:18.526639 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c3d5f6f4-a502-4cbf-95c6-e85416bcd559/account-auditor/0.log" Nov 24 02:16:18 crc kubenswrapper[4755]: I1124 02:16:18.547776 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-j6fwg"] Nov 24 02:16:18 crc kubenswrapper[4755]: E1124 02:16:18.549122 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a718486b-ffcf-4759-9352-e24f56e98895" containerName="container-00" Nov 24 02:16:18 crc kubenswrapper[4755]: I1124 02:16:18.549152 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="a718486b-ffcf-4759-9352-e24f56e98895" containerName="container-00" Nov 24 02:16:18 crc kubenswrapper[4755]: I1124 02:16:18.549583 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="a718486b-ffcf-4759-9352-e24f56e98895" containerName="container-00" Nov 24 02:16:18 crc kubenswrapper[4755]: I1124 02:16:18.551431 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j6fwg" Nov 24 02:16:18 crc kubenswrapper[4755]: I1124 02:16:18.560582 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-j6fwg"] Nov 24 02:16:18 crc kubenswrapper[4755]: I1124 02:16:18.629835 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c3d5f6f4-a502-4cbf-95c6-e85416bcd559/account-reaper/0.log" Nov 24 02:16:18 crc kubenswrapper[4755]: I1124 02:16:18.654528 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hdgkx\" (UniqueName: \"kubernetes.io/projected/f54f0c4a-340c-4d34-9726-7e3004fb71e8-kube-api-access-hdgkx\") pod \"redhat-marketplace-j6fwg\" (UID: \"f54f0c4a-340c-4d34-9726-7e3004fb71e8\") " pod="openshift-marketplace/redhat-marketplace-j6fwg" Nov 24 02:16:18 crc kubenswrapper[4755]: I1124 02:16:18.654671 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f54f0c4a-340c-4d34-9726-7e3004fb71e8-catalog-content\") pod \"redhat-marketplace-j6fwg\" (UID: \"f54f0c4a-340c-4d34-9726-7e3004fb71e8\") " pod="openshift-marketplace/redhat-marketplace-j6fwg" Nov 24 02:16:18 crc kubenswrapper[4755]: I1124 02:16:18.654786 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f54f0c4a-340c-4d34-9726-7e3004fb71e8-utilities\") pod \"redhat-marketplace-j6fwg\" (UID: \"f54f0c4a-340c-4d34-9726-7e3004fb71e8\") " pod="openshift-marketplace/redhat-marketplace-j6fwg" Nov 24 02:16:18 crc kubenswrapper[4755]: I1124 02:16:18.670881 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c3d5f6f4-a502-4cbf-95c6-e85416bcd559/account-replicator/0.log" Nov 24 02:16:18 crc kubenswrapper[4755]: I1124 02:16:18.756298 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f54f0c4a-340c-4d34-9726-7e3004fb71e8-catalog-content\") pod \"redhat-marketplace-j6fwg\" (UID: \"f54f0c4a-340c-4d34-9726-7e3004fb71e8\") " pod="openshift-marketplace/redhat-marketplace-j6fwg" Nov 24 02:16:18 crc kubenswrapper[4755]: I1124 02:16:18.756374 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f54f0c4a-340c-4d34-9726-7e3004fb71e8-utilities\") pod \"redhat-marketplace-j6fwg\" (UID: \"f54f0c4a-340c-4d34-9726-7e3004fb71e8\") " pod="openshift-marketplace/redhat-marketplace-j6fwg" Nov 24 02:16:18 crc kubenswrapper[4755]: I1124 02:16:18.756449 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hdgkx\" (UniqueName: \"kubernetes.io/projected/f54f0c4a-340c-4d34-9726-7e3004fb71e8-kube-api-access-hdgkx\") pod \"redhat-marketplace-j6fwg\" (UID: \"f54f0c4a-340c-4d34-9726-7e3004fb71e8\") " pod="openshift-marketplace/redhat-marketplace-j6fwg" Nov 24 02:16:18 crc kubenswrapper[4755]: I1124 02:16:18.757294 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f54f0c4a-340c-4d34-9726-7e3004fb71e8-catalog-content\") pod \"redhat-marketplace-j6fwg\" (UID: \"f54f0c4a-340c-4d34-9726-7e3004fb71e8\") " pod="openshift-marketplace/redhat-marketplace-j6fwg" Nov 24 02:16:18 crc kubenswrapper[4755]: I1124 02:16:18.757522 
4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f54f0c4a-340c-4d34-9726-7e3004fb71e8-utilities\") pod \"redhat-marketplace-j6fwg\" (UID: \"f54f0c4a-340c-4d34-9726-7e3004fb71e8\") " pod="openshift-marketplace/redhat-marketplace-j6fwg" Nov 24 02:16:18 crc kubenswrapper[4755]: I1124 02:16:18.798571 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hdgkx\" (UniqueName: \"kubernetes.io/projected/f54f0c4a-340c-4d34-9726-7e3004fb71e8-kube-api-access-hdgkx\") pod \"redhat-marketplace-j6fwg\" (UID: \"f54f0c4a-340c-4d34-9726-7e3004fb71e8\") " pod="openshift-marketplace/redhat-marketplace-j6fwg" Nov 24 02:16:18 crc kubenswrapper[4755]: I1124 02:16:18.849921 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c3d5f6f4-a502-4cbf-95c6-e85416bcd559/account-server/0.log" Nov 24 02:16:18 crc kubenswrapper[4755]: I1124 02:16:18.913487 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c3d5f6f4-a502-4cbf-95c6-e85416bcd559/container-server/0.log" Nov 24 02:16:18 crc kubenswrapper[4755]: I1124 02:16:18.927296 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j6fwg" Nov 24 02:16:18 crc kubenswrapper[4755]: I1124 02:16:18.935255 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c3d5f6f4-a502-4cbf-95c6-e85416bcd559/container-auditor/0.log" Nov 24 02:16:18 crc kubenswrapper[4755]: I1124 02:16:18.944732 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c3d5f6f4-a502-4cbf-95c6-e85416bcd559/container-replicator/0.log" Nov 24 02:16:19 crc kubenswrapper[4755]: I1124 02:16:19.192081 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c3d5f6f4-a502-4cbf-95c6-e85416bcd559/object-auditor/0.log" Nov 24 02:16:19 crc kubenswrapper[4755]: I1124 02:16:19.238539 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c3d5f6f4-a502-4cbf-95c6-e85416bcd559/container-updater/0.log" Nov 24 02:16:19 crc kubenswrapper[4755]: I1124 02:16:19.321844 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c3d5f6f4-a502-4cbf-95c6-e85416bcd559/object-expirer/0.log" Nov 24 02:16:19 crc kubenswrapper[4755]: I1124 02:16:19.337773 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c3d5f6f4-a502-4cbf-95c6-e85416bcd559/object-replicator/0.log" Nov 24 02:16:19 crc kubenswrapper[4755]: I1124 02:16:19.421201 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-j6fwg"] Nov 24 02:16:19 crc kubenswrapper[4755]: I1124 02:16:19.459422 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c3d5f6f4-a502-4cbf-95c6-e85416bcd559/object-updater/0.log" Nov 24 02:16:19 crc kubenswrapper[4755]: I1124 02:16:19.493574 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c3d5f6f4-a502-4cbf-95c6-e85416bcd559/object-server/0.log" Nov 24 02:16:19 crc kubenswrapper[4755]: I1124 02:16:19.567048 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c3d5f6f4-a502-4cbf-95c6-e85416bcd559/swift-recon-cron/0.log" Nov 24 02:16:19 crc kubenswrapper[4755]: I1124 02:16:19.568363 4755 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_c3d5f6f4-a502-4cbf-95c6-e85416bcd559/rsync/0.log" Nov 24 02:16:19 crc kubenswrapper[4755]: I1124 02:16:19.757516 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-7grgk_c8f52ac0-a9d0-48c2-b1dc-0ebc5bfd4ad7/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Nov 24 02:16:19 crc kubenswrapper[4755]: I1124 02:16:19.824727 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_23d50e60-91da-42c3-8d11-5c22eab88929/tempest-tests-tempest-tests-runner/0.log" Nov 24 02:16:19 crc kubenswrapper[4755]: I1124 02:16:19.919409 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_19e2c4ea-ac40-4007-b635-f5decba54fc3/test-operator-logs-container/0.log" Nov 24 02:16:19 crc kubenswrapper[4755]: I1124 02:16:19.938983 4755 generic.go:334] "Generic (PLEG): container finished" podID="f54f0c4a-340c-4d34-9726-7e3004fb71e8" containerID="fcaa5c805f7156f4e67ca256b9edd873236fcd47a1a3fa7377270dd12c9d82bc" exitCode=0 Nov 24 02:16:19 crc kubenswrapper[4755]: I1124 02:16:19.939031 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j6fwg" event={"ID":"f54f0c4a-340c-4d34-9726-7e3004fb71e8","Type":"ContainerDied","Data":"fcaa5c805f7156f4e67ca256b9edd873236fcd47a1a3fa7377270dd12c9d82bc"} Nov 24 02:16:19 crc kubenswrapper[4755]: I1124 02:16:19.939057 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j6fwg" event={"ID":"f54f0c4a-340c-4d34-9726-7e3004fb71e8","Type":"ContainerStarted","Data":"d8ace384be24b37fcf42eb7e3a93e73f3d7f9df4cbd029af3192a99c7f22384b"} Nov 24 02:16:19 crc kubenswrapper[4755]: I1124 02:16:19.941203 4755 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 24 02:16:20 crc kubenswrapper[4755]: I1124 02:16:20.138292 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-phn9q_dac685c9-1650-4372-9f79-0c359d3169eb/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Nov 24 02:16:21 crc kubenswrapper[4755]: I1124 02:16:21.970173 4755 generic.go:334] "Generic (PLEG): container finished" podID="f54f0c4a-340c-4d34-9726-7e3004fb71e8" containerID="fed0c0a4adc720711dee716ff7059401184af7a64ec47bd9d0b27eef2f9f57a6" exitCode=0 Nov 24 02:16:21 crc kubenswrapper[4755]: I1124 02:16:21.970318 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j6fwg" event={"ID":"f54f0c4a-340c-4d34-9726-7e3004fb71e8","Type":"ContainerDied","Data":"fed0c0a4adc720711dee716ff7059401184af7a64ec47bd9d0b27eef2f9f57a6"} Nov 24 02:16:22 crc kubenswrapper[4755]: E1124 02:16:22.343787 4755 log.go:32] "ImageFsInfo from image service failed" err="rpc error: code = Unknown desc = get image fs info unable to get usage for /var/lib/containers/storage/overlay-images: get disk usage for path /var/lib/containers/storage/overlay-images: lstat /var/lib/containers/storage/overlay-images/.tmp-images.json862286418: no such file or directory" Nov 24 02:16:22 crc kubenswrapper[4755]: E1124 02:16:22.343852 4755 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get imageFs stats: missing image stats: nil" Nov 24 02:16:22 crc kubenswrapper[4755]: I1124 02:16:22.984105 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-j6fwg" event={"ID":"f54f0c4a-340c-4d34-9726-7e3004fb71e8","Type":"ContainerStarted","Data":"03e68192c0f6dc259fcc816bfb9d59fc84400abd72fa854413ad488f226469fb"} Nov 24 02:16:23 crc kubenswrapper[4755]: I1124 02:16:23.004646 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-j6fwg" podStartSLOduration=2.60077931 podStartE2EDuration="5.004631721s" podCreationTimestamp="2025-11-24 02:16:18 +0000 UTC" firstStartedPulling="2025-11-24 02:16:19.940987142 +0000 UTC m=+3804.627052643" lastFinishedPulling="2025-11-24 02:16:22.344839553 +0000 UTC m=+3807.030905054" observedRunningTime="2025-11-24 02:16:23.003089218 +0000 UTC m=+3807.689154719" watchObservedRunningTime="2025-11-24 02:16:23.004631721 +0000 UTC m=+3807.690697222" Nov 24 02:16:28 crc kubenswrapper[4755]: I1124 02:16:28.927481 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-j6fwg" Nov 24 02:16:28 crc kubenswrapper[4755]: I1124 02:16:28.927941 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-j6fwg" Nov 24 02:16:28 crc kubenswrapper[4755]: I1124 02:16:28.981300 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-j6fwg" Nov 24 02:16:29 crc kubenswrapper[4755]: I1124 02:16:29.110077 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-j6fwg" Nov 24 02:16:29 crc kubenswrapper[4755]: I1124 02:16:29.227317 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-j6fwg"] Nov 24 02:16:29 crc kubenswrapper[4755]: I1124 02:16:29.919915 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_fa97a0b2-add8-4532-ab38-d726de9f0a60/memcached/0.log" Nov 24 02:16:31 crc kubenswrapper[4755]: I1124 02:16:31.067975 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-j6fwg" podUID="f54f0c4a-340c-4d34-9726-7e3004fb71e8" containerName="registry-server" containerID="cri-o://03e68192c0f6dc259fcc816bfb9d59fc84400abd72fa854413ad488f226469fb" gracePeriod=2 Nov 24 02:16:31 crc kubenswrapper[4755]: I1124 02:16:31.604487 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j6fwg" Nov 24 02:16:31 crc kubenswrapper[4755]: I1124 02:16:31.697195 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f54f0c4a-340c-4d34-9726-7e3004fb71e8-catalog-content\") pod \"f54f0c4a-340c-4d34-9726-7e3004fb71e8\" (UID: \"f54f0c4a-340c-4d34-9726-7e3004fb71e8\") " Nov 24 02:16:31 crc kubenswrapper[4755]: I1124 02:16:31.697337 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f54f0c4a-340c-4d34-9726-7e3004fb71e8-utilities\") pod \"f54f0c4a-340c-4d34-9726-7e3004fb71e8\" (UID: \"f54f0c4a-340c-4d34-9726-7e3004fb71e8\") " Nov 24 02:16:31 crc kubenswrapper[4755]: I1124 02:16:31.697375 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hdgkx\" (UniqueName: \"kubernetes.io/projected/f54f0c4a-340c-4d34-9726-7e3004fb71e8-kube-api-access-hdgkx\") pod \"f54f0c4a-340c-4d34-9726-7e3004fb71e8\" (UID: \"f54f0c4a-340c-4d34-9726-7e3004fb71e8\") " Nov 24 02:16:31 crc kubenswrapper[4755]: I1124 02:16:31.698068 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f54f0c4a-340c-4d34-9726-7e3004fb71e8-utilities" (OuterVolumeSpecName: "utilities") pod "f54f0c4a-340c-4d34-9726-7e3004fb71e8" (UID: "f54f0c4a-340c-4d34-9726-7e3004fb71e8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 02:16:31 crc kubenswrapper[4755]: I1124 02:16:31.708823 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f54f0c4a-340c-4d34-9726-7e3004fb71e8-kube-api-access-hdgkx" (OuterVolumeSpecName: "kube-api-access-hdgkx") pod "f54f0c4a-340c-4d34-9726-7e3004fb71e8" (UID: "f54f0c4a-340c-4d34-9726-7e3004fb71e8"). InnerVolumeSpecName "kube-api-access-hdgkx". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 02:16:31 crc kubenswrapper[4755]: I1124 02:16:31.717988 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f54f0c4a-340c-4d34-9726-7e3004fb71e8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f54f0c4a-340c-4d34-9726-7e3004fb71e8" (UID: "f54f0c4a-340c-4d34-9726-7e3004fb71e8"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 02:16:31 crc kubenswrapper[4755]: I1124 02:16:31.799105 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f54f0c4a-340c-4d34-9726-7e3004fb71e8-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 02:16:31 crc kubenswrapper[4755]: I1124 02:16:31.799142 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hdgkx\" (UniqueName: \"kubernetes.io/projected/f54f0c4a-340c-4d34-9726-7e3004fb71e8-kube-api-access-hdgkx\") on node \"crc\" DevicePath \"\"" Nov 24 02:16:31 crc kubenswrapper[4755]: I1124 02:16:31.799153 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f54f0c4a-340c-4d34-9726-7e3004fb71e8-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 02:16:32 crc kubenswrapper[4755]: I1124 02:16:32.078006 4755 generic.go:334] "Generic (PLEG): container finished" podID="f54f0c4a-340c-4d34-9726-7e3004fb71e8" containerID="03e68192c0f6dc259fcc816bfb9d59fc84400abd72fa854413ad488f226469fb" exitCode=0 Nov 24 02:16:32 crc kubenswrapper[4755]: I1124 02:16:32.078048 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j6fwg" event={"ID":"f54f0c4a-340c-4d34-9726-7e3004fb71e8","Type":"ContainerDied","Data":"03e68192c0f6dc259fcc816bfb9d59fc84400abd72fa854413ad488f226469fb"} Nov 24 02:16:32 crc kubenswrapper[4755]: I1124 02:16:32.078073 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j6fwg" event={"ID":"f54f0c4a-340c-4d34-9726-7e3004fb71e8","Type":"ContainerDied","Data":"d8ace384be24b37fcf42eb7e3a93e73f3d7f9df4cbd029af3192a99c7f22384b"} Nov 24 02:16:32 crc kubenswrapper[4755]: I1124 02:16:32.078089 4755 scope.go:117] "RemoveContainer" containerID="03e68192c0f6dc259fcc816bfb9d59fc84400abd72fa854413ad488f226469fb" Nov 24 02:16:32 crc kubenswrapper[4755]: I1124 02:16:32.078207 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j6fwg" Nov 24 02:16:32 crc kubenswrapper[4755]: I1124 02:16:32.097446 4755 scope.go:117] "RemoveContainer" containerID="fed0c0a4adc720711dee716ff7059401184af7a64ec47bd9d0b27eef2f9f57a6" Nov 24 02:16:32 crc kubenswrapper[4755]: I1124 02:16:32.119171 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-j6fwg"] Nov 24 02:16:32 crc kubenswrapper[4755]: I1124 02:16:32.120807 4755 scope.go:117] "RemoveContainer" containerID="fcaa5c805f7156f4e67ca256b9edd873236fcd47a1a3fa7377270dd12c9d82bc" Nov 24 02:16:32 crc kubenswrapper[4755]: I1124 02:16:32.132344 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-j6fwg"] Nov 24 02:16:32 crc kubenswrapper[4755]: I1124 02:16:32.158887 4755 scope.go:117] "RemoveContainer" containerID="03e68192c0f6dc259fcc816bfb9d59fc84400abd72fa854413ad488f226469fb" Nov 24 02:16:32 crc kubenswrapper[4755]: E1124 02:16:32.161061 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"03e68192c0f6dc259fcc816bfb9d59fc84400abd72fa854413ad488f226469fb\": container with ID starting with 03e68192c0f6dc259fcc816bfb9d59fc84400abd72fa854413ad488f226469fb not found: ID does not exist" containerID="03e68192c0f6dc259fcc816bfb9d59fc84400abd72fa854413ad488f226469fb" Nov 24 02:16:32 crc kubenswrapper[4755]: I1124 02:16:32.161097 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03e68192c0f6dc259fcc816bfb9d59fc84400abd72fa854413ad488f226469fb"} err="failed to get container status \"03e68192c0f6dc259fcc816bfb9d59fc84400abd72fa854413ad488f226469fb\": rpc error: code = NotFound desc = could not find container \"03e68192c0f6dc259fcc816bfb9d59fc84400abd72fa854413ad488f226469fb\": container with ID starting with 03e68192c0f6dc259fcc816bfb9d59fc84400abd72fa854413ad488f226469fb not found: ID does not exist" Nov 24 02:16:32 crc kubenswrapper[4755]: I1124 02:16:32.161119 4755 scope.go:117] "RemoveContainer" containerID="fed0c0a4adc720711dee716ff7059401184af7a64ec47bd9d0b27eef2f9f57a6" Nov 24 02:16:32 crc kubenswrapper[4755]: E1124 02:16:32.161420 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fed0c0a4adc720711dee716ff7059401184af7a64ec47bd9d0b27eef2f9f57a6\": container with ID starting with fed0c0a4adc720711dee716ff7059401184af7a64ec47bd9d0b27eef2f9f57a6 not found: ID does not exist" containerID="fed0c0a4adc720711dee716ff7059401184af7a64ec47bd9d0b27eef2f9f57a6" Nov 24 02:16:32 crc kubenswrapper[4755]: I1124 02:16:32.161440 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fed0c0a4adc720711dee716ff7059401184af7a64ec47bd9d0b27eef2f9f57a6"} err="failed to get container status \"fed0c0a4adc720711dee716ff7059401184af7a64ec47bd9d0b27eef2f9f57a6\": rpc error: code = NotFound desc = could not find container \"fed0c0a4adc720711dee716ff7059401184af7a64ec47bd9d0b27eef2f9f57a6\": container with ID starting with fed0c0a4adc720711dee716ff7059401184af7a64ec47bd9d0b27eef2f9f57a6 not found: ID does not exist" Nov 24 02:16:32 crc kubenswrapper[4755]: I1124 02:16:32.161452 4755 scope.go:117] "RemoveContainer" containerID="fcaa5c805f7156f4e67ca256b9edd873236fcd47a1a3fa7377270dd12c9d82bc" Nov 24 02:16:32 crc kubenswrapper[4755]: E1124 02:16:32.161813 4755 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"fcaa5c805f7156f4e67ca256b9edd873236fcd47a1a3fa7377270dd12c9d82bc\": container with ID starting with fcaa5c805f7156f4e67ca256b9edd873236fcd47a1a3fa7377270dd12c9d82bc not found: ID does not exist" containerID="fcaa5c805f7156f4e67ca256b9edd873236fcd47a1a3fa7377270dd12c9d82bc" Nov 24 02:16:32 crc kubenswrapper[4755]: I1124 02:16:32.161833 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fcaa5c805f7156f4e67ca256b9edd873236fcd47a1a3fa7377270dd12c9d82bc"} err="failed to get container status \"fcaa5c805f7156f4e67ca256b9edd873236fcd47a1a3fa7377270dd12c9d82bc\": rpc error: code = NotFound desc = could not find container \"fcaa5c805f7156f4e67ca256b9edd873236fcd47a1a3fa7377270dd12c9d82bc\": container with ID starting with fcaa5c805f7156f4e67ca256b9edd873236fcd47a1a3fa7377270dd12c9d82bc not found: ID does not exist" Nov 24 02:16:32 crc kubenswrapper[4755]: I1124 02:16:32.996545 4755 scope.go:117] "RemoveContainer" containerID="fff609c366ec815c32e40db1bbafb7f7be4ae091a2824075c9d486c88017b349" Nov 24 02:16:32 crc kubenswrapper[4755]: E1124 02:16:32.997062 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:16:34 crc kubenswrapper[4755]: I1124 02:16:34.008946 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f54f0c4a-340c-4d34-9726-7e3004fb71e8" path="/var/lib/kubelet/pods/f54f0c4a-340c-4d34-9726-7e3004fb71e8/volumes" Nov 24 02:16:45 crc kubenswrapper[4755]: I1124 02:16:45.003011 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7b7d855816ca9e76c7c9245ff70832f330b0e0531ae10de757037d494dxbhbm_17800bdb-a186-4da5-aa5f-49925f2c6b5a/util/0.log" Nov 24 02:16:45 crc kubenswrapper[4755]: I1124 02:16:45.157206 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7b7d855816ca9e76c7c9245ff70832f330b0e0531ae10de757037d494dxbhbm_17800bdb-a186-4da5-aa5f-49925f2c6b5a/pull/0.log" Nov 24 02:16:45 crc kubenswrapper[4755]: I1124 02:16:45.163586 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7b7d855816ca9e76c7c9245ff70832f330b0e0531ae10de757037d494dxbhbm_17800bdb-a186-4da5-aa5f-49925f2c6b5a/util/0.log" Nov 24 02:16:45 crc kubenswrapper[4755]: I1124 02:16:45.223847 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7b7d855816ca9e76c7c9245ff70832f330b0e0531ae10de757037d494dxbhbm_17800bdb-a186-4da5-aa5f-49925f2c6b5a/pull/0.log" Nov 24 02:16:45 crc kubenswrapper[4755]: I1124 02:16:45.321857 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7b7d855816ca9e76c7c9245ff70832f330b0e0531ae10de757037d494dxbhbm_17800bdb-a186-4da5-aa5f-49925f2c6b5a/util/0.log" Nov 24 02:16:45 crc kubenswrapper[4755]: I1124 02:16:45.365310 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7b7d855816ca9e76c7c9245ff70832f330b0e0531ae10de757037d494dxbhbm_17800bdb-a186-4da5-aa5f-49925f2c6b5a/extract/0.log" Nov 24 02:16:45 crc kubenswrapper[4755]: I1124 02:16:45.372659 4755 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_7b7d855816ca9e76c7c9245ff70832f330b0e0531ae10de757037d494dxbhbm_17800bdb-a186-4da5-aa5f-49925f2c6b5a/pull/0.log" Nov 24 02:16:45 crc kubenswrapper[4755]: I1124 02:16:45.571077 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-75fb479bcc-b297l_1660a0eb-228b-41bc-a360-a71fec20d415/kube-rbac-proxy/0.log" Nov 24 02:16:45 crc kubenswrapper[4755]: I1124 02:16:45.583824 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-6498cbf48f-hh7kh_fc04cdee-f1bd-4d40-9c1c-02f4e9661851/kube-rbac-proxy/0.log" Nov 24 02:16:45 crc kubenswrapper[4755]: I1124 02:16:45.658425 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-75fb479bcc-b297l_1660a0eb-228b-41bc-a360-a71fec20d415/manager/0.log" Nov 24 02:16:45 crc kubenswrapper[4755]: I1124 02:16:45.802465 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-6498cbf48f-hh7kh_fc04cdee-f1bd-4d40-9c1c-02f4e9661851/manager/0.log" Nov 24 02:16:45 crc kubenswrapper[4755]: I1124 02:16:45.821734 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-767ccfd65f-fdwk5_e84b7100-14c9-436c-97e5-d14c2455b42a/kube-rbac-proxy/0.log" Nov 24 02:16:45 crc kubenswrapper[4755]: I1124 02:16:45.856550 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-767ccfd65f-fdwk5_e84b7100-14c9-436c-97e5-d14c2455b42a/manager/0.log" Nov 24 02:16:45 crc kubenswrapper[4755]: I1124 02:16:45.978724 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-7969689c84-bnmvb_a22ef49d-f887-41f4-ad37-6b1b0bf7a748/kube-rbac-proxy/0.log" Nov 24 02:16:46 crc kubenswrapper[4755]: I1124 02:16:46.110752 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-7969689c84-bnmvb_a22ef49d-f887-41f4-ad37-6b1b0bf7a748/manager/0.log" Nov 24 02:16:46 crc kubenswrapper[4755]: I1124 02:16:46.174673 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-56f54d6746-45h29_22780566-edb3-47e3-b3ea-a42def0f4460/kube-rbac-proxy/0.log" Nov 24 02:16:46 crc kubenswrapper[4755]: I1124 02:16:46.198398 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-56f54d6746-45h29_22780566-edb3-47e3-b3ea-a42def0f4460/manager/0.log" Nov 24 02:16:46 crc kubenswrapper[4755]: I1124 02:16:46.346086 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-598f69df5d-97znt_77153df1-136d-456e-a6e0-817b2f633d3e/kube-rbac-proxy/0.log" Nov 24 02:16:46 crc kubenswrapper[4755]: I1124 02:16:46.407008 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-598f69df5d-97znt_77153df1-136d-456e-a6e0-817b2f633d3e/manager/0.log" Nov 24 02:16:46 crc kubenswrapper[4755]: I1124 02:16:46.527719 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-6dd8864d7c-zsbst_fd91f6d6-1cc9-4350-a22a-b3859073f6e0/kube-rbac-proxy/0.log" Nov 24 02:16:46 crc kubenswrapper[4755]: I1124 02:16:46.626349 4755 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-99b499f4-p9wgw_f443bd2d-3e36-44eb-9684-8ec505b8bea7/kube-rbac-proxy/0.log" Nov 24 02:16:46 crc kubenswrapper[4755]: I1124 02:16:46.720816 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-6dd8864d7c-zsbst_fd91f6d6-1cc9-4350-a22a-b3859073f6e0/manager/0.log" Nov 24 02:16:46 crc kubenswrapper[4755]: I1124 02:16:46.767745 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-99b499f4-p9wgw_f443bd2d-3e36-44eb-9684-8ec505b8bea7/manager/0.log" Nov 24 02:16:46 crc kubenswrapper[4755]: I1124 02:16:46.857305 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7454b96578-5622b_dd14c3fa-bb96-4795-b339-a506c71b16a2/kube-rbac-proxy/0.log" Nov 24 02:16:46 crc kubenswrapper[4755]: I1124 02:16:46.957578 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7454b96578-5622b_dd14c3fa-bb96-4795-b339-a506c71b16a2/manager/0.log" Nov 24 02:16:46 crc kubenswrapper[4755]: I1124 02:16:46.996628 4755 scope.go:117] "RemoveContainer" containerID="fff609c366ec815c32e40db1bbafb7f7be4ae091a2824075c9d486c88017b349" Nov 24 02:16:46 crc kubenswrapper[4755]: E1124 02:16:46.997045 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:16:47 crc kubenswrapper[4755]: I1124 02:16:47.018972 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-58f887965d-2rnh9_33d6bfe7-943b-4a59-bfdd-e240b869163d/kube-rbac-proxy/0.log" Nov 24 02:16:47 crc kubenswrapper[4755]: I1124 02:16:47.062379 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-58f887965d-2rnh9_33d6bfe7-943b-4a59-bfdd-e240b869163d/manager/0.log" Nov 24 02:16:47 crc kubenswrapper[4755]: I1124 02:16:47.197210 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-54b5986bb8-gqlfb_1a7c3ac2-1c0f-474e-837c-b80226975978/kube-rbac-proxy/0.log" Nov 24 02:16:47 crc kubenswrapper[4755]: I1124 02:16:47.231727 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-54b5986bb8-gqlfb_1a7c3ac2-1c0f-474e-837c-b80226975978/manager/0.log" Nov 24 02:16:47 crc kubenswrapper[4755]: I1124 02:16:47.492353 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-78bd47f458-pcp8q_9e78d1d5-6ae4-4fc3-9edf-77e9f331bf19/kube-rbac-proxy/0.log" Nov 24 02:16:47 crc kubenswrapper[4755]: I1124 02:16:47.668768 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-78bd47f458-pcp8q_9e78d1d5-6ae4-4fc3-9edf-77e9f331bf19/manager/0.log" Nov 24 02:16:47 crc kubenswrapper[4755]: I1124 02:16:47.679417 4755 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_nova-operator-controller-manager-cfbb9c588-prcbz_9b49aa48-d2db-40a6-9f6f-6b8e5bd3cd07/kube-rbac-proxy/0.log" Nov 24 02:16:47 crc kubenswrapper[4755]: I1124 02:16:47.775545 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-cfbb9c588-prcbz_9b49aa48-d2db-40a6-9f6f-6b8e5bd3cd07/manager/0.log" Nov 24 02:16:47 crc kubenswrapper[4755]: I1124 02:16:47.849891 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-54cfbf4c7d-7t6ft_3c770fe2-ea89-4ba8-b4f0-95a4f310ea65/kube-rbac-proxy/0.log" Nov 24 02:16:47 crc kubenswrapper[4755]: I1124 02:16:47.903818 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-54cfbf4c7d-7t6ft_3c770fe2-ea89-4ba8-b4f0-95a4f310ea65/manager/0.log" Nov 24 02:16:48 crc kubenswrapper[4755]: I1124 02:16:48.026788 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-8c7444f48-rbmhv_f013c70d-8c89-40f5-a132-393403d297c2/kube-rbac-proxy/0.log" Nov 24 02:16:48 crc kubenswrapper[4755]: I1124 02:16:48.043821 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-8c7444f48-rbmhv_f013c70d-8c89-40f5-a132-393403d297c2/manager/0.log" Nov 24 02:16:48 crc kubenswrapper[4755]: I1124 02:16:48.204517 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-68786bb554-2pljv_8695f181-2de4-4fa8-b952-8208ab710b94/kube-rbac-proxy/0.log" Nov 24 02:16:48 crc kubenswrapper[4755]: I1124 02:16:48.339197 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-95bdd885d-rpv4q_3730c324-139f-4560-ac12-c8e0595a58cb/kube-rbac-proxy/0.log" Nov 24 02:16:48 crc kubenswrapper[4755]: I1124 02:16:48.565209 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-7f5r2_ccfae768-f324-4db5-ac90-8fd333deca44/registry-server/0.log" Nov 24 02:16:48 crc kubenswrapper[4755]: I1124 02:16:48.585553 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-95bdd885d-rpv4q_3730c324-139f-4560-ac12-c8e0595a58cb/operator/0.log" Nov 24 02:16:48 crc kubenswrapper[4755]: I1124 02:16:48.887552 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-54fc5f65b7-b94wx_73185acc-71f3-452e-8454-ebad97b6c6ad/manager/0.log" Nov 24 02:16:48 crc kubenswrapper[4755]: I1124 02:16:48.906002 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-54fc5f65b7-b94wx_73185acc-71f3-452e-8454-ebad97b6c6ad/kube-rbac-proxy/0.log" Nov 24 02:16:49 crc kubenswrapper[4755]: I1124 02:16:49.097303 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-5b797b8dff-s22mt_eb1590a5-3843-4540-ac41-bdfe49ae6569/manager/0.log" Nov 24 02:16:49 crc kubenswrapper[4755]: I1124 02:16:49.165675 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-5b797b8dff-s22mt_eb1590a5-3843-4540-ac41-bdfe49ae6569/kube-rbac-proxy/0.log" Nov 24 02:16:49 crc kubenswrapper[4755]: I1124 02:16:49.294841 4755 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-68786bb554-2pljv_8695f181-2de4-4fa8-b952-8208ab710b94/manager/0.log" Nov 24 02:16:49 crc kubenswrapper[4755]: I1124 02:16:49.302433 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-5f97d8c699-g2b7d_2107694a-19fc-40cd-9ef2-b8b60b8b88e2/operator/0.log" Nov 24 02:16:49 crc kubenswrapper[4755]: I1124 02:16:49.405876 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-d656998f4-kbj7b_62e235a5-7928-4e26-9948-a3d2a829ef23/kube-rbac-proxy/0.log" Nov 24 02:16:49 crc kubenswrapper[4755]: I1124 02:16:49.436453 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-d656998f4-kbj7b_62e235a5-7928-4e26-9948-a3d2a829ef23/manager/0.log" Nov 24 02:16:49 crc kubenswrapper[4755]: I1124 02:16:49.488624 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-6d4bf84b58-7pmzf_25b69b88-4612-4183-a978-b9dd58502d37/kube-rbac-proxy/0.log" Nov 24 02:16:49 crc kubenswrapper[4755]: I1124 02:16:49.586272 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-6d4bf84b58-7pmzf_25b69b88-4612-4183-a978-b9dd58502d37/manager/0.log" Nov 24 02:16:49 crc kubenswrapper[4755]: I1124 02:16:49.657357 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-b4c496f69-n9dqh_8914c196-79e5-456c-9a42-1f4464f8dbf8/kube-rbac-proxy/0.log" Nov 24 02:16:49 crc kubenswrapper[4755]: I1124 02:16:49.680037 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-b4c496f69-n9dqh_8914c196-79e5-456c-9a42-1f4464f8dbf8/manager/0.log" Nov 24 02:16:49 crc kubenswrapper[4755]: I1124 02:16:49.789522 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-8c6448b9f-j56hj_03ab2bfa-29d5-408b-8d69-54b8b367be23/kube-rbac-proxy/0.log" Nov 24 02:16:49 crc kubenswrapper[4755]: I1124 02:16:49.891863 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-8c6448b9f-j56hj_03ab2bfa-29d5-408b-8d69-54b8b367be23/manager/0.log" Nov 24 02:16:58 crc kubenswrapper[4755]: I1124 02:16:58.996744 4755 scope.go:117] "RemoveContainer" containerID="fff609c366ec815c32e40db1bbafb7f7be4ae091a2824075c9d486c88017b349" Nov 24 02:16:58 crc kubenswrapper[4755]: E1124 02:16:58.997632 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:17:05 crc kubenswrapper[4755]: I1124 02:17:05.162090 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-6qvk7_fa143983-92e8-480e-9bb3-928892077000/control-plane-machine-set-operator/0.log" Nov 24 02:17:05 crc kubenswrapper[4755]: I1124 02:17:05.315429 4755 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-9f7kc_fceb51f9-deec-4840-86d0-a67228819bef/kube-rbac-proxy/0.log" Nov 24 02:17:05 crc kubenswrapper[4755]: I1124 02:17:05.361053 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-9f7kc_fceb51f9-deec-4840-86d0-a67228819bef/machine-api-operator/0.log" Nov 24 02:17:12 crc kubenswrapper[4755]: I1124 02:17:12.000243 4755 scope.go:117] "RemoveContainer" containerID="fff609c366ec815c32e40db1bbafb7f7be4ae091a2824075c9d486c88017b349" Nov 24 02:17:12 crc kubenswrapper[4755]: E1124 02:17:12.000969 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:17:17 crc kubenswrapper[4755]: I1124 02:17:17.072083 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-vwz4w_4b370b30-6433-4155-be26-46a905bb6b3d/cert-manager-controller/0.log" Nov 24 02:17:17 crc kubenswrapper[4755]: I1124 02:17:17.261139 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-kvcl8_844c92d0-68cd-410c-ba95-f440eb5bfcfc/cert-manager-cainjector/0.log" Nov 24 02:17:17 crc kubenswrapper[4755]: I1124 02:17:17.305186 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-t6d9j_6c7e5cd9-3286-4a20-849c-92cc8c910e10/cert-manager-webhook/0.log" Nov 24 02:17:24 crc kubenswrapper[4755]: I1124 02:17:24.996793 4755 scope.go:117] "RemoveContainer" containerID="fff609c366ec815c32e40db1bbafb7f7be4ae091a2824075c9d486c88017b349" Nov 24 02:17:24 crc kubenswrapper[4755]: E1124 02:17:24.997511 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:17:29 crc kubenswrapper[4755]: I1124 02:17:29.281568 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-5874bd7bc5-bjf4f_b0fbafa4-291f-4eee-8133-30e7a85ff7ff/nmstate-console-plugin/0.log" Nov 24 02:17:29 crc kubenswrapper[4755]: I1124 02:17:29.419930 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-7hlnk_3e04477a-f03b-4cd7-ba29-1622ea087da5/nmstate-handler/0.log" Nov 24 02:17:29 crc kubenswrapper[4755]: I1124 02:17:29.467574 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-5dcf9c57c5-db5zr_e48ce9d8-52d8-4ab4-9f9b-b4ea43d68277/kube-rbac-proxy/0.log" Nov 24 02:17:29 crc kubenswrapper[4755]: I1124 02:17:29.472058 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-5dcf9c57c5-db5zr_e48ce9d8-52d8-4ab4-9f9b-b4ea43d68277/nmstate-metrics/0.log" Nov 24 02:17:29 crc kubenswrapper[4755]: I1124 02:17:29.615564 4755 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-nmstate_nmstate-operator-557fdffb88-qmm9f_0594f79b-cc74-4be7-a0c0-605666ea9f19/nmstate-operator/0.log" Nov 24 02:17:29 crc kubenswrapper[4755]: I1124 02:17:29.680486 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6b89b748d8-6dn5d_d2fd6ef2-fcd0-4169-a26a-4f30b0619efa/nmstate-webhook/0.log" Nov 24 02:17:37 crc kubenswrapper[4755]: I1124 02:17:37.997141 4755 scope.go:117] "RemoveContainer" containerID="fff609c366ec815c32e40db1bbafb7f7be4ae091a2824075c9d486c88017b349" Nov 24 02:17:37 crc kubenswrapper[4755]: E1124 02:17:37.998054 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:17:42 crc kubenswrapper[4755]: I1124 02:17:42.679136 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6c7b4b5f48-lz2vt_cf1d6671-b748-4db1-89b9-9ae4968f8297/kube-rbac-proxy/0.log" Nov 24 02:17:42 crc kubenswrapper[4755]: I1124 02:17:42.765895 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6c7b4b5f48-lz2vt_cf1d6671-b748-4db1-89b9-9ae4968f8297/controller/0.log" Nov 24 02:17:42 crc kubenswrapper[4755]: I1124 02:17:42.873802 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8wz9f_5218d7a8-6776-4f68-afa1-fc48e1d058f5/cp-frr-files/0.log" Nov 24 02:17:42 crc kubenswrapper[4755]: I1124 02:17:42.987354 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8wz9f_5218d7a8-6776-4f68-afa1-fc48e1d058f5/cp-frr-files/0.log" Nov 24 02:17:42 crc kubenswrapper[4755]: I1124 02:17:42.996878 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8wz9f_5218d7a8-6776-4f68-afa1-fc48e1d058f5/cp-metrics/0.log" Nov 24 02:17:43 crc kubenswrapper[4755]: I1124 02:17:43.045595 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8wz9f_5218d7a8-6776-4f68-afa1-fc48e1d058f5/cp-reloader/0.log" Nov 24 02:17:43 crc kubenswrapper[4755]: I1124 02:17:43.086714 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8wz9f_5218d7a8-6776-4f68-afa1-fc48e1d058f5/cp-reloader/0.log" Nov 24 02:17:43 crc kubenswrapper[4755]: I1124 02:17:43.241987 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8wz9f_5218d7a8-6776-4f68-afa1-fc48e1d058f5/cp-frr-files/0.log" Nov 24 02:17:43 crc kubenswrapper[4755]: I1124 02:17:43.276135 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8wz9f_5218d7a8-6776-4f68-afa1-fc48e1d058f5/cp-reloader/0.log" Nov 24 02:17:43 crc kubenswrapper[4755]: I1124 02:17:43.288369 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8wz9f_5218d7a8-6776-4f68-afa1-fc48e1d058f5/cp-metrics/0.log" Nov 24 02:17:43 crc kubenswrapper[4755]: I1124 02:17:43.310863 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8wz9f_5218d7a8-6776-4f68-afa1-fc48e1d058f5/cp-metrics/0.log" Nov 24 02:17:43 crc kubenswrapper[4755]: I1124 02:17:43.432333 4755 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-8wz9f_5218d7a8-6776-4f68-afa1-fc48e1d058f5/cp-frr-files/0.log" Nov 24 02:17:43 crc kubenswrapper[4755]: I1124 02:17:43.493803 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8wz9f_5218d7a8-6776-4f68-afa1-fc48e1d058f5/cp-metrics/0.log" Nov 24 02:17:43 crc kubenswrapper[4755]: I1124 02:17:43.495465 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8wz9f_5218d7a8-6776-4f68-afa1-fc48e1d058f5/controller/0.log" Nov 24 02:17:43 crc kubenswrapper[4755]: I1124 02:17:43.508272 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8wz9f_5218d7a8-6776-4f68-afa1-fc48e1d058f5/cp-reloader/0.log" Nov 24 02:17:43 crc kubenswrapper[4755]: I1124 02:17:43.673489 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8wz9f_5218d7a8-6776-4f68-afa1-fc48e1d058f5/kube-rbac-proxy/0.log" Nov 24 02:17:43 crc kubenswrapper[4755]: I1124 02:17:43.695179 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8wz9f_5218d7a8-6776-4f68-afa1-fc48e1d058f5/kube-rbac-proxy-frr/0.log" Nov 24 02:17:43 crc kubenswrapper[4755]: I1124 02:17:43.705942 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8wz9f_5218d7a8-6776-4f68-afa1-fc48e1d058f5/frr-metrics/0.log" Nov 24 02:17:43 crc kubenswrapper[4755]: I1124 02:17:43.863355 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8wz9f_5218d7a8-6776-4f68-afa1-fc48e1d058f5/reloader/0.log" Nov 24 02:17:43 crc kubenswrapper[4755]: I1124 02:17:43.918155 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-6998585d5-fd8hk_326eacf2-4f20-4577-b64c-5e5a55b8667a/frr-k8s-webhook-server/0.log" Nov 24 02:17:44 crc kubenswrapper[4755]: I1124 02:17:44.116364 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-55d87b5596-gzttc_546c69cc-7307-405e-b5db-22ab6f25b47d/manager/0.log" Nov 24 02:17:44 crc kubenswrapper[4755]: I1124 02:17:44.323669 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-7d8857ff56-s2ljm_19c770cd-3557-4cc8-a06d-0597e9766be2/webhook-server/0.log" Nov 24 02:17:44 crc kubenswrapper[4755]: I1124 02:17:44.376592 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-4qvvw_06358ec3-9d2a-433d-8de9-5044c2e189a4/kube-rbac-proxy/0.log" Nov 24 02:17:44 crc kubenswrapper[4755]: I1124 02:17:44.946168 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-4qvvw_06358ec3-9d2a-433d-8de9-5044c2e189a4/speaker/0.log" Nov 24 02:17:45 crc kubenswrapper[4755]: I1124 02:17:45.162404 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8wz9f_5218d7a8-6776-4f68-afa1-fc48e1d058f5/frr/0.log" Nov 24 02:17:50 crc kubenswrapper[4755]: I1124 02:17:50.997033 4755 scope.go:117] "RemoveContainer" containerID="fff609c366ec815c32e40db1bbafb7f7be4ae091a2824075c9d486c88017b349" Nov 24 02:17:50 crc kubenswrapper[4755]: E1124 02:17:50.997797 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:17:56 crc kubenswrapper[4755]: I1124 02:17:56.355093 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772er2r7z_9fa781fe-e51d-4912-b210-e873945bcbf8/util/0.log" Nov 24 02:17:56 crc kubenswrapper[4755]: I1124 02:17:56.537488 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772er2r7z_9fa781fe-e51d-4912-b210-e873945bcbf8/pull/0.log" Nov 24 02:17:56 crc kubenswrapper[4755]: I1124 02:17:56.572284 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772er2r7z_9fa781fe-e51d-4912-b210-e873945bcbf8/util/0.log" Nov 24 02:17:56 crc kubenswrapper[4755]: I1124 02:17:56.597292 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772er2r7z_9fa781fe-e51d-4912-b210-e873945bcbf8/pull/0.log" Nov 24 02:17:56 crc kubenswrapper[4755]: I1124 02:17:56.735190 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772er2r7z_9fa781fe-e51d-4912-b210-e873945bcbf8/util/0.log" Nov 24 02:17:56 crc kubenswrapper[4755]: I1124 02:17:56.736111 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772er2r7z_9fa781fe-e51d-4912-b210-e873945bcbf8/pull/0.log" Nov 24 02:17:56 crc kubenswrapper[4755]: I1124 02:17:56.760891 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772er2r7z_9fa781fe-e51d-4912-b210-e873945bcbf8/extract/0.log" Nov 24 02:17:56 crc kubenswrapper[4755]: I1124 02:17:56.901243 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8mk5m_fb5622d8-2858-48d9-94e9-5a4ea557c6ae/extract-utilities/0.log" Nov 24 02:17:57 crc kubenswrapper[4755]: I1124 02:17:57.107135 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8mk5m_fb5622d8-2858-48d9-94e9-5a4ea557c6ae/extract-utilities/0.log" Nov 24 02:17:57 crc kubenswrapper[4755]: I1124 02:17:57.112390 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8mk5m_fb5622d8-2858-48d9-94e9-5a4ea557c6ae/extract-content/0.log" Nov 24 02:17:57 crc kubenswrapper[4755]: I1124 02:17:57.116030 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8mk5m_fb5622d8-2858-48d9-94e9-5a4ea557c6ae/extract-content/0.log" Nov 24 02:17:57 crc kubenswrapper[4755]: I1124 02:17:57.248312 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8mk5m_fb5622d8-2858-48d9-94e9-5a4ea557c6ae/extract-utilities/0.log" Nov 24 02:17:57 crc kubenswrapper[4755]: I1124 02:17:57.288171 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8mk5m_fb5622d8-2858-48d9-94e9-5a4ea557c6ae/extract-content/0.log" Nov 24 02:17:57 crc kubenswrapper[4755]: I1124 02:17:57.486515 4755 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_community-operators-4ms9l_802e4447-64d9-4370-954b-7212c1ef7a9d/extract-utilities/0.log" Nov 24 02:17:57 crc kubenswrapper[4755]: I1124 02:17:57.732783 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-4ms9l_802e4447-64d9-4370-954b-7212c1ef7a9d/extract-content/0.log" Nov 24 02:17:57 crc kubenswrapper[4755]: I1124 02:17:57.759058 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8mk5m_fb5622d8-2858-48d9-94e9-5a4ea557c6ae/registry-server/0.log" Nov 24 02:17:57 crc kubenswrapper[4755]: I1124 02:17:57.763589 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-4ms9l_802e4447-64d9-4370-954b-7212c1ef7a9d/extract-utilities/0.log" Nov 24 02:17:57 crc kubenswrapper[4755]: I1124 02:17:57.776154 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-4ms9l_802e4447-64d9-4370-954b-7212c1ef7a9d/extract-content/0.log" Nov 24 02:17:57 crc kubenswrapper[4755]: I1124 02:17:57.916233 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-4ms9l_802e4447-64d9-4370-954b-7212c1ef7a9d/extract-utilities/0.log" Nov 24 02:17:57 crc kubenswrapper[4755]: I1124 02:17:57.942430 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-4ms9l_802e4447-64d9-4370-954b-7212c1ef7a9d/extract-content/0.log" Nov 24 02:17:58 crc kubenswrapper[4755]: I1124 02:17:58.158981 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6ttqpp_1a72defe-0081-4267-ab64-1c844503e5cc/util/0.log" Nov 24 02:17:58 crc kubenswrapper[4755]: I1124 02:17:58.416327 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6ttqpp_1a72defe-0081-4267-ab64-1c844503e5cc/pull/0.log" Nov 24 02:17:58 crc kubenswrapper[4755]: I1124 02:17:58.425140 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6ttqpp_1a72defe-0081-4267-ab64-1c844503e5cc/util/0.log" Nov 24 02:17:58 crc kubenswrapper[4755]: I1124 02:17:58.441261 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6ttqpp_1a72defe-0081-4267-ab64-1c844503e5cc/pull/0.log" Nov 24 02:17:58 crc kubenswrapper[4755]: I1124 02:17:58.474592 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-4ms9l_802e4447-64d9-4370-954b-7212c1ef7a9d/registry-server/0.log" Nov 24 02:17:58 crc kubenswrapper[4755]: I1124 02:17:58.582386 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6ttqpp_1a72defe-0081-4267-ab64-1c844503e5cc/util/0.log" Nov 24 02:17:58 crc kubenswrapper[4755]: I1124 02:17:58.589380 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6ttqpp_1a72defe-0081-4267-ab64-1c844503e5cc/pull/0.log" Nov 24 02:17:58 crc kubenswrapper[4755]: I1124 02:17:58.638109 4755 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6ttqpp_1a72defe-0081-4267-ab64-1c844503e5cc/extract/0.log" Nov 24 02:17:58 crc kubenswrapper[4755]: I1124 02:17:58.777234 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-bs6fj_be6e8d7e-1c19-449b-a7f5-c104a92edf7c/marketplace-operator/0.log" Nov 24 02:17:58 crc kubenswrapper[4755]: I1124 02:17:58.824921 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-vjn27_098cf4e9-8a23-42d7-ae62-497aa11abcca/extract-utilities/0.log" Nov 24 02:17:58 crc kubenswrapper[4755]: I1124 02:17:58.963045 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-vjn27_098cf4e9-8a23-42d7-ae62-497aa11abcca/extract-content/0.log" Nov 24 02:17:58 crc kubenswrapper[4755]: I1124 02:17:58.974046 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-vjn27_098cf4e9-8a23-42d7-ae62-497aa11abcca/extract-content/0.log" Nov 24 02:17:59 crc kubenswrapper[4755]: I1124 02:17:59.031547 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-vjn27_098cf4e9-8a23-42d7-ae62-497aa11abcca/extract-utilities/0.log" Nov 24 02:17:59 crc kubenswrapper[4755]: I1124 02:17:59.181291 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-vjn27_098cf4e9-8a23-42d7-ae62-497aa11abcca/extract-content/0.log" Nov 24 02:17:59 crc kubenswrapper[4755]: I1124 02:17:59.181663 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-vjn27_098cf4e9-8a23-42d7-ae62-497aa11abcca/extract-utilities/0.log" Nov 24 02:17:59 crc kubenswrapper[4755]: I1124 02:17:59.366723 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-vjn27_098cf4e9-8a23-42d7-ae62-497aa11abcca/registry-server/0.log" Nov 24 02:17:59 crc kubenswrapper[4755]: I1124 02:17:59.408437 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-cgvb9_49c6f838-5dc3-4129-ad76-15b58019b9cc/extract-utilities/0.log" Nov 24 02:17:59 crc kubenswrapper[4755]: I1124 02:17:59.573532 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-cgvb9_49c6f838-5dc3-4129-ad76-15b58019b9cc/extract-content/0.log" Nov 24 02:17:59 crc kubenswrapper[4755]: I1124 02:17:59.591088 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-cgvb9_49c6f838-5dc3-4129-ad76-15b58019b9cc/extract-utilities/0.log" Nov 24 02:17:59 crc kubenswrapper[4755]: I1124 02:17:59.609265 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-cgvb9_49c6f838-5dc3-4129-ad76-15b58019b9cc/extract-content/0.log" Nov 24 02:17:59 crc kubenswrapper[4755]: I1124 02:17:59.729730 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-cgvb9_49c6f838-5dc3-4129-ad76-15b58019b9cc/extract-utilities/0.log" Nov 24 02:17:59 crc kubenswrapper[4755]: I1124 02:17:59.758452 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-cgvb9_49c6f838-5dc3-4129-ad76-15b58019b9cc/extract-content/0.log" Nov 24 02:18:00 crc kubenswrapper[4755]: I1124 02:18:00.204479 4755 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openshift-marketplace_redhat-operators-cgvb9_49c6f838-5dc3-4129-ad76-15b58019b9cc/registry-server/0.log" Nov 24 02:18:02 crc kubenswrapper[4755]: I1124 02:18:02.996248 4755 scope.go:117] "RemoveContainer" containerID="fff609c366ec815c32e40db1bbafb7f7be4ae091a2824075c9d486c88017b349" Nov 24 02:18:02 crc kubenswrapper[4755]: E1124 02:18:02.996937 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h8xzm_openshift-machine-config-operator(b1962128-02a0-46c3-82c2-5055c2aed0b9)\"" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" Nov 24 02:18:14 crc kubenswrapper[4755]: I1124 02:18:14.998352 4755 scope.go:117] "RemoveContainer" containerID="fff609c366ec815c32e40db1bbafb7f7be4ae091a2824075c9d486c88017b349" Nov 24 02:18:16 crc kubenswrapper[4755]: I1124 02:18:16.147992 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" event={"ID":"b1962128-02a0-46c3-82c2-5055c2aed0b9","Type":"ContainerStarted","Data":"132aaca735d6247969fce4c7c62b56055bb2249fa4151064cefe83ffdb9514e8"} Nov 24 02:19:39 crc kubenswrapper[4755]: I1124 02:19:39.990701 4755 generic.go:334] "Generic (PLEG): container finished" podID="811c205f-798a-47ad-9b10-3fc501a6b9f6" containerID="3e4ca36a298490ae8feb66d41e99713f3ae9eeed22da9ab1612e3ee12d2ffd29" exitCode=0 Nov 24 02:19:39 crc kubenswrapper[4755]: I1124 02:19:39.990776 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-25x59/must-gather-r7rxm" event={"ID":"811c205f-798a-47ad-9b10-3fc501a6b9f6","Type":"ContainerDied","Data":"3e4ca36a298490ae8feb66d41e99713f3ae9eeed22da9ab1612e3ee12d2ffd29"} Nov 24 02:19:39 crc kubenswrapper[4755]: I1124 02:19:39.992309 4755 scope.go:117] "RemoveContainer" containerID="3e4ca36a298490ae8feb66d41e99713f3ae9eeed22da9ab1612e3ee12d2ffd29" Nov 24 02:19:40 crc kubenswrapper[4755]: I1124 02:19:40.764722 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-25x59_must-gather-r7rxm_811c205f-798a-47ad-9b10-3fc501a6b9f6/gather/0.log" Nov 24 02:19:51 crc kubenswrapper[4755]: I1124 02:19:51.018518 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-25x59/must-gather-r7rxm"] Nov 24 02:19:51 crc kubenswrapper[4755]: I1124 02:19:51.019384 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-25x59/must-gather-r7rxm" podUID="811c205f-798a-47ad-9b10-3fc501a6b9f6" containerName="copy" containerID="cri-o://1ab254cd4019c07e88554a840170603a14c9a314d32b668bbceedef1f81a5c9d" gracePeriod=2 Nov 24 02:19:51 crc kubenswrapper[4755]: I1124 02:19:51.032362 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-25x59/must-gather-r7rxm"] Nov 24 02:19:51 crc kubenswrapper[4755]: I1124 02:19:51.463520 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-25x59_must-gather-r7rxm_811c205f-798a-47ad-9b10-3fc501a6b9f6/copy/0.log" Nov 24 02:19:51 crc kubenswrapper[4755]: I1124 02:19:51.464257 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-25x59/must-gather-r7rxm" Nov 24 02:19:51 crc kubenswrapper[4755]: I1124 02:19:51.615654 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d2q5p\" (UniqueName: \"kubernetes.io/projected/811c205f-798a-47ad-9b10-3fc501a6b9f6-kube-api-access-d2q5p\") pod \"811c205f-798a-47ad-9b10-3fc501a6b9f6\" (UID: \"811c205f-798a-47ad-9b10-3fc501a6b9f6\") " Nov 24 02:19:51 crc kubenswrapper[4755]: I1124 02:19:51.615803 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/811c205f-798a-47ad-9b10-3fc501a6b9f6-must-gather-output\") pod \"811c205f-798a-47ad-9b10-3fc501a6b9f6\" (UID: \"811c205f-798a-47ad-9b10-3fc501a6b9f6\") " Nov 24 02:19:51 crc kubenswrapper[4755]: I1124 02:19:51.633252 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/811c205f-798a-47ad-9b10-3fc501a6b9f6-kube-api-access-d2q5p" (OuterVolumeSpecName: "kube-api-access-d2q5p") pod "811c205f-798a-47ad-9b10-3fc501a6b9f6" (UID: "811c205f-798a-47ad-9b10-3fc501a6b9f6"). InnerVolumeSpecName "kube-api-access-d2q5p". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 02:19:51 crc kubenswrapper[4755]: I1124 02:19:51.718255 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d2q5p\" (UniqueName: \"kubernetes.io/projected/811c205f-798a-47ad-9b10-3fc501a6b9f6-kube-api-access-d2q5p\") on node \"crc\" DevicePath \"\"" Nov 24 02:19:51 crc kubenswrapper[4755]: I1124 02:19:51.749060 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/811c205f-798a-47ad-9b10-3fc501a6b9f6-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "811c205f-798a-47ad-9b10-3fc501a6b9f6" (UID: "811c205f-798a-47ad-9b10-3fc501a6b9f6"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 02:19:51 crc kubenswrapper[4755]: I1124 02:19:51.820172 4755 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/811c205f-798a-47ad-9b10-3fc501a6b9f6-must-gather-output\") on node \"crc\" DevicePath \"\"" Nov 24 02:19:52 crc kubenswrapper[4755]: I1124 02:19:52.007522 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="811c205f-798a-47ad-9b10-3fc501a6b9f6" path="/var/lib/kubelet/pods/811c205f-798a-47ad-9b10-3fc501a6b9f6/volumes" Nov 24 02:19:52 crc kubenswrapper[4755]: I1124 02:19:52.116499 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-25x59_must-gather-r7rxm_811c205f-798a-47ad-9b10-3fc501a6b9f6/copy/0.log" Nov 24 02:19:52 crc kubenswrapper[4755]: I1124 02:19:52.116968 4755 generic.go:334] "Generic (PLEG): container finished" podID="811c205f-798a-47ad-9b10-3fc501a6b9f6" containerID="1ab254cd4019c07e88554a840170603a14c9a314d32b668bbceedef1f81a5c9d" exitCode=143 Nov 24 02:19:52 crc kubenswrapper[4755]: I1124 02:19:52.117024 4755 scope.go:117] "RemoveContainer" containerID="1ab254cd4019c07e88554a840170603a14c9a314d32b668bbceedef1f81a5c9d" Nov 24 02:19:52 crc kubenswrapper[4755]: I1124 02:19:52.117034 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-25x59/must-gather-r7rxm" Nov 24 02:19:52 crc kubenswrapper[4755]: I1124 02:19:52.139219 4755 scope.go:117] "RemoveContainer" containerID="3e4ca36a298490ae8feb66d41e99713f3ae9eeed22da9ab1612e3ee12d2ffd29" Nov 24 02:19:52 crc kubenswrapper[4755]: I1124 02:19:52.204853 4755 scope.go:117] "RemoveContainer" containerID="1ab254cd4019c07e88554a840170603a14c9a314d32b668bbceedef1f81a5c9d" Nov 24 02:19:52 crc kubenswrapper[4755]: E1124 02:19:52.205356 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1ab254cd4019c07e88554a840170603a14c9a314d32b668bbceedef1f81a5c9d\": container with ID starting with 1ab254cd4019c07e88554a840170603a14c9a314d32b668bbceedef1f81a5c9d not found: ID does not exist" containerID="1ab254cd4019c07e88554a840170603a14c9a314d32b668bbceedef1f81a5c9d" Nov 24 02:19:52 crc kubenswrapper[4755]: I1124 02:19:52.205428 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ab254cd4019c07e88554a840170603a14c9a314d32b668bbceedef1f81a5c9d"} err="failed to get container status \"1ab254cd4019c07e88554a840170603a14c9a314d32b668bbceedef1f81a5c9d\": rpc error: code = NotFound desc = could not find container \"1ab254cd4019c07e88554a840170603a14c9a314d32b668bbceedef1f81a5c9d\": container with ID starting with 1ab254cd4019c07e88554a840170603a14c9a314d32b668bbceedef1f81a5c9d not found: ID does not exist" Nov 24 02:19:52 crc kubenswrapper[4755]: I1124 02:19:52.205469 4755 scope.go:117] "RemoveContainer" containerID="3e4ca36a298490ae8feb66d41e99713f3ae9eeed22da9ab1612e3ee12d2ffd29" Nov 24 02:19:52 crc kubenswrapper[4755]: E1124 02:19:52.205831 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3e4ca36a298490ae8feb66d41e99713f3ae9eeed22da9ab1612e3ee12d2ffd29\": container with ID starting with 3e4ca36a298490ae8feb66d41e99713f3ae9eeed22da9ab1612e3ee12d2ffd29 not found: ID does not exist" containerID="3e4ca36a298490ae8feb66d41e99713f3ae9eeed22da9ab1612e3ee12d2ffd29" Nov 24 02:19:52 crc kubenswrapper[4755]: I1124 02:19:52.205872 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e4ca36a298490ae8feb66d41e99713f3ae9eeed22da9ab1612e3ee12d2ffd29"} err="failed to get container status \"3e4ca36a298490ae8feb66d41e99713f3ae9eeed22da9ab1612e3ee12d2ffd29\": rpc error: code = NotFound desc = could not find container \"3e4ca36a298490ae8feb66d41e99713f3ae9eeed22da9ab1612e3ee12d2ffd29\": container with ID starting with 3e4ca36a298490ae8feb66d41e99713f3ae9eeed22da9ab1612e3ee12d2ffd29 not found: ID does not exist" Nov 24 02:20:33 crc kubenswrapper[4755]: I1124 02:20:33.294971 4755 patch_prober.go:28] interesting pod/machine-config-daemon-h8xzm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 02:20:33 crc kubenswrapper[4755]: I1124 02:20:33.295577 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 02:20:57 crc kubenswrapper[4755]: I1124 02:20:57.784050 4755 
prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/openstack-galera-0" podUID="22215216-efac-4810-90f1-4d42ccc6399c" containerName="galera" probeResult="failure" output="command timed out" Nov 24 02:20:57 crc kubenswrapper[4755]: I1124 02:20:57.784846 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/openstack-galera-0" podUID="22215216-efac-4810-90f1-4d42ccc6399c" containerName="galera" probeResult="failure" output="command timed out" Nov 24 02:21:03 crc kubenswrapper[4755]: I1124 02:21:03.295055 4755 patch_prober.go:28] interesting pod/machine-config-daemon-h8xzm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 02:21:03 crc kubenswrapper[4755]: I1124 02:21:03.295590 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 02:21:05 crc kubenswrapper[4755]: I1124 02:21:05.230549 4755 scope.go:117] "RemoveContainer" containerID="26af8c08ca463596685a1bc59fa2fa008196c979d478af9a2bcbc0c1a0275c54" Nov 24 02:21:18 crc kubenswrapper[4755]: I1124 02:21:18.215171 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-8tl6n"] Nov 24 02:21:18 crc kubenswrapper[4755]: E1124 02:21:18.216078 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f54f0c4a-340c-4d34-9726-7e3004fb71e8" containerName="registry-server" Nov 24 02:21:18 crc kubenswrapper[4755]: I1124 02:21:18.216092 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="f54f0c4a-340c-4d34-9726-7e3004fb71e8" containerName="registry-server" Nov 24 02:21:18 crc kubenswrapper[4755]: E1124 02:21:18.216109 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f54f0c4a-340c-4d34-9726-7e3004fb71e8" containerName="extract-content" Nov 24 02:21:18 crc kubenswrapper[4755]: I1124 02:21:18.216115 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="f54f0c4a-340c-4d34-9726-7e3004fb71e8" containerName="extract-content" Nov 24 02:21:18 crc kubenswrapper[4755]: E1124 02:21:18.216124 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f54f0c4a-340c-4d34-9726-7e3004fb71e8" containerName="extract-utilities" Nov 24 02:21:18 crc kubenswrapper[4755]: I1124 02:21:18.216130 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="f54f0c4a-340c-4d34-9726-7e3004fb71e8" containerName="extract-utilities" Nov 24 02:21:18 crc kubenswrapper[4755]: E1124 02:21:18.216138 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="811c205f-798a-47ad-9b10-3fc501a6b9f6" containerName="copy" Nov 24 02:21:18 crc kubenswrapper[4755]: I1124 02:21:18.216143 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="811c205f-798a-47ad-9b10-3fc501a6b9f6" containerName="copy" Nov 24 02:21:18 crc kubenswrapper[4755]: E1124 02:21:18.216179 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="811c205f-798a-47ad-9b10-3fc501a6b9f6" containerName="gather" Nov 24 02:21:18 crc kubenswrapper[4755]: I1124 02:21:18.216185 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="811c205f-798a-47ad-9b10-3fc501a6b9f6" containerName="gather" Nov 24 02:21:18 crc 
kubenswrapper[4755]: I1124 02:21:18.216443 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="f54f0c4a-340c-4d34-9726-7e3004fb71e8" containerName="registry-server" Nov 24 02:21:18 crc kubenswrapper[4755]: I1124 02:21:18.216458 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="811c205f-798a-47ad-9b10-3fc501a6b9f6" containerName="gather" Nov 24 02:21:18 crc kubenswrapper[4755]: I1124 02:21:18.216471 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="811c205f-798a-47ad-9b10-3fc501a6b9f6" containerName="copy" Nov 24 02:21:18 crc kubenswrapper[4755]: I1124 02:21:18.217748 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8tl6n" Nov 24 02:21:18 crc kubenswrapper[4755]: I1124 02:21:18.234727 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8tl6n"] Nov 24 02:21:18 crc kubenswrapper[4755]: I1124 02:21:18.268577 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0fe33c34-d157-48cc-8853-2592600f4b37-utilities\") pod \"certified-operators-8tl6n\" (UID: \"0fe33c34-d157-48cc-8853-2592600f4b37\") " pod="openshift-marketplace/certified-operators-8tl6n" Nov 24 02:21:18 crc kubenswrapper[4755]: I1124 02:21:18.268843 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-64s2z\" (UniqueName: \"kubernetes.io/projected/0fe33c34-d157-48cc-8853-2592600f4b37-kube-api-access-64s2z\") pod \"certified-operators-8tl6n\" (UID: \"0fe33c34-d157-48cc-8853-2592600f4b37\") " pod="openshift-marketplace/certified-operators-8tl6n" Nov 24 02:21:18 crc kubenswrapper[4755]: I1124 02:21:18.268964 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0fe33c34-d157-48cc-8853-2592600f4b37-catalog-content\") pod \"certified-operators-8tl6n\" (UID: \"0fe33c34-d157-48cc-8853-2592600f4b37\") " pod="openshift-marketplace/certified-operators-8tl6n" Nov 24 02:21:18 crc kubenswrapper[4755]: I1124 02:21:18.371847 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0fe33c34-d157-48cc-8853-2592600f4b37-catalog-content\") pod \"certified-operators-8tl6n\" (UID: \"0fe33c34-d157-48cc-8853-2592600f4b37\") " pod="openshift-marketplace/certified-operators-8tl6n" Nov 24 02:21:18 crc kubenswrapper[4755]: I1124 02:21:18.372384 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0fe33c34-d157-48cc-8853-2592600f4b37-catalog-content\") pod \"certified-operators-8tl6n\" (UID: \"0fe33c34-d157-48cc-8853-2592600f4b37\") " pod="openshift-marketplace/certified-operators-8tl6n" Nov 24 02:21:18 crc kubenswrapper[4755]: I1124 02:21:18.372716 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0fe33c34-d157-48cc-8853-2592600f4b37-utilities\") pod \"certified-operators-8tl6n\" (UID: \"0fe33c34-d157-48cc-8853-2592600f4b37\") " pod="openshift-marketplace/certified-operators-8tl6n" Nov 24 02:21:18 crc kubenswrapper[4755]: I1124 02:21:18.372984 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-64s2z\" (UniqueName: 
\"kubernetes.io/projected/0fe33c34-d157-48cc-8853-2592600f4b37-kube-api-access-64s2z\") pod \"certified-operators-8tl6n\" (UID: \"0fe33c34-d157-48cc-8853-2592600f4b37\") " pod="openshift-marketplace/certified-operators-8tl6n" Nov 24 02:21:18 crc kubenswrapper[4755]: I1124 02:21:18.372989 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0fe33c34-d157-48cc-8853-2592600f4b37-utilities\") pod \"certified-operators-8tl6n\" (UID: \"0fe33c34-d157-48cc-8853-2592600f4b37\") " pod="openshift-marketplace/certified-operators-8tl6n" Nov 24 02:21:18 crc kubenswrapper[4755]: I1124 02:21:18.406482 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-64s2z\" (UniqueName: \"kubernetes.io/projected/0fe33c34-d157-48cc-8853-2592600f4b37-kube-api-access-64s2z\") pod \"certified-operators-8tl6n\" (UID: \"0fe33c34-d157-48cc-8853-2592600f4b37\") " pod="openshift-marketplace/certified-operators-8tl6n" Nov 24 02:21:18 crc kubenswrapper[4755]: I1124 02:21:18.545342 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8tl6n" Nov 24 02:21:19 crc kubenswrapper[4755]: I1124 02:21:19.091282 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8tl6n"] Nov 24 02:21:19 crc kubenswrapper[4755]: I1124 02:21:19.990847 4755 generic.go:334] "Generic (PLEG): container finished" podID="0fe33c34-d157-48cc-8853-2592600f4b37" containerID="7b9da7870fe6ff9597ad2aab1f97e1a36476359dcf2369bf15ccbd929d233e87" exitCode=0 Nov 24 02:21:19 crc kubenswrapper[4755]: I1124 02:21:19.990953 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8tl6n" event={"ID":"0fe33c34-d157-48cc-8853-2592600f4b37","Type":"ContainerDied","Data":"7b9da7870fe6ff9597ad2aab1f97e1a36476359dcf2369bf15ccbd929d233e87"} Nov 24 02:21:19 crc kubenswrapper[4755]: I1124 02:21:19.991281 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8tl6n" event={"ID":"0fe33c34-d157-48cc-8853-2592600f4b37","Type":"ContainerStarted","Data":"920bbf58ca8a1a2cdfb7f930ef357f14750235224db979b4ece01746aef8221a"} Nov 24 02:21:19 crc kubenswrapper[4755]: I1124 02:21:19.993926 4755 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 24 02:21:22 crc kubenswrapper[4755]: I1124 02:21:22.016038 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8tl6n" event={"ID":"0fe33c34-d157-48cc-8853-2592600f4b37","Type":"ContainerStarted","Data":"d808616b7b508e66f6219439b12e9eba541a160e4201f0e9ad4ef6dff01034de"} Nov 24 02:21:23 crc kubenswrapper[4755]: I1124 02:21:23.033512 4755 generic.go:334] "Generic (PLEG): container finished" podID="0fe33c34-d157-48cc-8853-2592600f4b37" containerID="d808616b7b508e66f6219439b12e9eba541a160e4201f0e9ad4ef6dff01034de" exitCode=0 Nov 24 02:21:23 crc kubenswrapper[4755]: I1124 02:21:23.033590 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8tl6n" event={"ID":"0fe33c34-d157-48cc-8853-2592600f4b37","Type":"ContainerDied","Data":"d808616b7b508e66f6219439b12e9eba541a160e4201f0e9ad4ef6dff01034de"} Nov 24 02:21:23 crc kubenswrapper[4755]: I1124 02:21:23.080703 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-vcqdv"] Nov 24 02:21:23 crc kubenswrapper[4755]: I1124 
02:21:23.084308 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vcqdv" Nov 24 02:21:23 crc kubenswrapper[4755]: I1124 02:21:23.090441 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vcqdv"] Nov 24 02:21:23 crc kubenswrapper[4755]: I1124 02:21:23.164876 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kl9z6\" (UniqueName: \"kubernetes.io/projected/229e04af-3144-4594-978b-46e7e77be937-kube-api-access-kl9z6\") pod \"redhat-operators-vcqdv\" (UID: \"229e04af-3144-4594-978b-46e7e77be937\") " pod="openshift-marketplace/redhat-operators-vcqdv" Nov 24 02:21:23 crc kubenswrapper[4755]: I1124 02:21:23.165252 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/229e04af-3144-4594-978b-46e7e77be937-catalog-content\") pod \"redhat-operators-vcqdv\" (UID: \"229e04af-3144-4594-978b-46e7e77be937\") " pod="openshift-marketplace/redhat-operators-vcqdv" Nov 24 02:21:23 crc kubenswrapper[4755]: I1124 02:21:23.165389 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/229e04af-3144-4594-978b-46e7e77be937-utilities\") pod \"redhat-operators-vcqdv\" (UID: \"229e04af-3144-4594-978b-46e7e77be937\") " pod="openshift-marketplace/redhat-operators-vcqdv" Nov 24 02:21:23 crc kubenswrapper[4755]: I1124 02:21:23.267587 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kl9z6\" (UniqueName: \"kubernetes.io/projected/229e04af-3144-4594-978b-46e7e77be937-kube-api-access-kl9z6\") pod \"redhat-operators-vcqdv\" (UID: \"229e04af-3144-4594-978b-46e7e77be937\") " pod="openshift-marketplace/redhat-operators-vcqdv" Nov 24 02:21:23 crc kubenswrapper[4755]: I1124 02:21:23.267777 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/229e04af-3144-4594-978b-46e7e77be937-catalog-content\") pod \"redhat-operators-vcqdv\" (UID: \"229e04af-3144-4594-978b-46e7e77be937\") " pod="openshift-marketplace/redhat-operators-vcqdv" Nov 24 02:21:23 crc kubenswrapper[4755]: I1124 02:21:23.267829 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/229e04af-3144-4594-978b-46e7e77be937-utilities\") pod \"redhat-operators-vcqdv\" (UID: \"229e04af-3144-4594-978b-46e7e77be937\") " pod="openshift-marketplace/redhat-operators-vcqdv" Nov 24 02:21:23 crc kubenswrapper[4755]: I1124 02:21:23.268302 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/229e04af-3144-4594-978b-46e7e77be937-utilities\") pod \"redhat-operators-vcqdv\" (UID: \"229e04af-3144-4594-978b-46e7e77be937\") " pod="openshift-marketplace/redhat-operators-vcqdv" Nov 24 02:21:23 crc kubenswrapper[4755]: I1124 02:21:23.268352 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/229e04af-3144-4594-978b-46e7e77be937-catalog-content\") pod \"redhat-operators-vcqdv\" (UID: \"229e04af-3144-4594-978b-46e7e77be937\") " pod="openshift-marketplace/redhat-operators-vcqdv" Nov 24 02:21:23 crc kubenswrapper[4755]: I1124 02:21:23.289979 4755 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kl9z6\" (UniqueName: \"kubernetes.io/projected/229e04af-3144-4594-978b-46e7e77be937-kube-api-access-kl9z6\") pod \"redhat-operators-vcqdv\" (UID: \"229e04af-3144-4594-978b-46e7e77be937\") " pod="openshift-marketplace/redhat-operators-vcqdv" Nov 24 02:21:23 crc kubenswrapper[4755]: I1124 02:21:23.415008 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vcqdv" Nov 24 02:21:23 crc kubenswrapper[4755]: I1124 02:21:23.949673 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vcqdv"] Nov 24 02:21:24 crc kubenswrapper[4755]: W1124 02:21:24.090806 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod229e04af_3144_4594_978b_46e7e77be937.slice/crio-8e0b524579784a9c190579f0a6d83072eadcd5048659203cd625151c42d536bb WatchSource:0}: Error finding container 8e0b524579784a9c190579f0a6d83072eadcd5048659203cd625151c42d536bb: Status 404 returned error can't find the container with id 8e0b524579784a9c190579f0a6d83072eadcd5048659203cd625151c42d536bb Nov 24 02:21:25 crc kubenswrapper[4755]: I1124 02:21:25.057084 4755 generic.go:334] "Generic (PLEG): container finished" podID="229e04af-3144-4594-978b-46e7e77be937" containerID="2f242590fa284dfa42d2d00679c0c052e805f9c729825760ef6acbce5d8c9aa8" exitCode=0 Nov 24 02:21:25 crc kubenswrapper[4755]: I1124 02:21:25.057133 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vcqdv" event={"ID":"229e04af-3144-4594-978b-46e7e77be937","Type":"ContainerDied","Data":"2f242590fa284dfa42d2d00679c0c052e805f9c729825760ef6acbce5d8c9aa8"} Nov 24 02:21:25 crc kubenswrapper[4755]: I1124 02:21:25.057415 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vcqdv" event={"ID":"229e04af-3144-4594-978b-46e7e77be937","Type":"ContainerStarted","Data":"8e0b524579784a9c190579f0a6d83072eadcd5048659203cd625151c42d536bb"} Nov 24 02:21:25 crc kubenswrapper[4755]: I1124 02:21:25.059793 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8tl6n" event={"ID":"0fe33c34-d157-48cc-8853-2592600f4b37","Type":"ContainerStarted","Data":"30974f3160052b221ad5dbb0f50cab5e345f92916463d648b74ebfb371b88de3"} Nov 24 02:21:26 crc kubenswrapper[4755]: I1124 02:21:26.071482 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vcqdv" event={"ID":"229e04af-3144-4594-978b-46e7e77be937","Type":"ContainerStarted","Data":"c87f40aeaf26a9577992dc49f5a539807e622a3ac10f2489afb3efb6e25ee418"} Nov 24 02:21:26 crc kubenswrapper[4755]: I1124 02:21:26.098803 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-8tl6n" podStartSLOduration=4.606778201 podStartE2EDuration="8.098784134s" podCreationTimestamp="2025-11-24 02:21:18 +0000 UTC" firstStartedPulling="2025-11-24 02:21:19.993532479 +0000 UTC m=+4104.679597990" lastFinishedPulling="2025-11-24 02:21:23.485538422 +0000 UTC m=+4108.171603923" observedRunningTime="2025-11-24 02:21:25.09644089 +0000 UTC m=+4109.782506431" watchObservedRunningTime="2025-11-24 02:21:26.098784134 +0000 UTC m=+4110.784849645" Nov 24 02:21:27 crc kubenswrapper[4755]: I1124 02:21:27.086406 4755 generic.go:334] "Generic (PLEG): container finished" 
podID="229e04af-3144-4594-978b-46e7e77be937" containerID="c87f40aeaf26a9577992dc49f5a539807e622a3ac10f2489afb3efb6e25ee418" exitCode=0 Nov 24 02:21:27 crc kubenswrapper[4755]: I1124 02:21:27.086772 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vcqdv" event={"ID":"229e04af-3144-4594-978b-46e7e77be937","Type":"ContainerDied","Data":"c87f40aeaf26a9577992dc49f5a539807e622a3ac10f2489afb3efb6e25ee418"} Nov 24 02:21:28 crc kubenswrapper[4755]: I1124 02:21:28.098827 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vcqdv" event={"ID":"229e04af-3144-4594-978b-46e7e77be937","Type":"ContainerStarted","Data":"a140dd9ee9107cc62b4fb1972a083746a10c3258be0c396f5a7cf4bd4d42cda0"} Nov 24 02:21:28 crc kubenswrapper[4755]: I1124 02:21:28.142532 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-vcqdv" podStartSLOduration=2.598359439 podStartE2EDuration="5.142505269s" podCreationTimestamp="2025-11-24 02:21:23 +0000 UTC" firstStartedPulling="2025-11-24 02:21:25.059947188 +0000 UTC m=+4109.746012729" lastFinishedPulling="2025-11-24 02:21:27.604093058 +0000 UTC m=+4112.290158559" observedRunningTime="2025-11-24 02:21:28.126891089 +0000 UTC m=+4112.812956580" watchObservedRunningTime="2025-11-24 02:21:28.142505269 +0000 UTC m=+4112.828570780" Nov 24 02:21:28 crc kubenswrapper[4755]: I1124 02:21:28.545682 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-8tl6n" Nov 24 02:21:28 crc kubenswrapper[4755]: I1124 02:21:28.545733 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-8tl6n" Nov 24 02:21:28 crc kubenswrapper[4755]: I1124 02:21:28.591259 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-8tl6n" Nov 24 02:21:29 crc kubenswrapper[4755]: I1124 02:21:29.156978 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-8tl6n" Nov 24 02:21:31 crc kubenswrapper[4755]: I1124 02:21:31.008851 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8tl6n"] Nov 24 02:21:31 crc kubenswrapper[4755]: I1124 02:21:31.124227 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-8tl6n" podUID="0fe33c34-d157-48cc-8853-2592600f4b37" containerName="registry-server" containerID="cri-o://30974f3160052b221ad5dbb0f50cab5e345f92916463d648b74ebfb371b88de3" gracePeriod=2 Nov 24 02:21:31 crc kubenswrapper[4755]: I1124 02:21:31.624167 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8tl6n" Nov 24 02:21:31 crc kubenswrapper[4755]: I1124 02:21:31.628230 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-64s2z\" (UniqueName: \"kubernetes.io/projected/0fe33c34-d157-48cc-8853-2592600f4b37-kube-api-access-64s2z\") pod \"0fe33c34-d157-48cc-8853-2592600f4b37\" (UID: \"0fe33c34-d157-48cc-8853-2592600f4b37\") " Nov 24 02:21:31 crc kubenswrapper[4755]: I1124 02:21:31.628445 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0fe33c34-d157-48cc-8853-2592600f4b37-catalog-content\") pod \"0fe33c34-d157-48cc-8853-2592600f4b37\" (UID: \"0fe33c34-d157-48cc-8853-2592600f4b37\") " Nov 24 02:21:31 crc kubenswrapper[4755]: I1124 02:21:31.628508 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0fe33c34-d157-48cc-8853-2592600f4b37-utilities\") pod \"0fe33c34-d157-48cc-8853-2592600f4b37\" (UID: \"0fe33c34-d157-48cc-8853-2592600f4b37\") " Nov 24 02:21:31 crc kubenswrapper[4755]: I1124 02:21:31.629393 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0fe33c34-d157-48cc-8853-2592600f4b37-utilities" (OuterVolumeSpecName: "utilities") pod "0fe33c34-d157-48cc-8853-2592600f4b37" (UID: "0fe33c34-d157-48cc-8853-2592600f4b37"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 02:21:31 crc kubenswrapper[4755]: I1124 02:21:31.634798 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0fe33c34-d157-48cc-8853-2592600f4b37-kube-api-access-64s2z" (OuterVolumeSpecName: "kube-api-access-64s2z") pod "0fe33c34-d157-48cc-8853-2592600f4b37" (UID: "0fe33c34-d157-48cc-8853-2592600f4b37"). InnerVolumeSpecName "kube-api-access-64s2z". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 02:21:31 crc kubenswrapper[4755]: I1124 02:21:31.683119 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0fe33c34-d157-48cc-8853-2592600f4b37-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0fe33c34-d157-48cc-8853-2592600f4b37" (UID: "0fe33c34-d157-48cc-8853-2592600f4b37"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 02:21:31 crc kubenswrapper[4755]: I1124 02:21:31.730718 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-64s2z\" (UniqueName: \"kubernetes.io/projected/0fe33c34-d157-48cc-8853-2592600f4b37-kube-api-access-64s2z\") on node \"crc\" DevicePath \"\"" Nov 24 02:21:31 crc kubenswrapper[4755]: I1124 02:21:31.730745 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0fe33c34-d157-48cc-8853-2592600f4b37-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 02:21:31 crc kubenswrapper[4755]: I1124 02:21:31.730755 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0fe33c34-d157-48cc-8853-2592600f4b37-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 02:21:32 crc kubenswrapper[4755]: I1124 02:21:32.138289 4755 generic.go:334] "Generic (PLEG): container finished" podID="0fe33c34-d157-48cc-8853-2592600f4b37" containerID="30974f3160052b221ad5dbb0f50cab5e345f92916463d648b74ebfb371b88de3" exitCode=0 Nov 24 02:21:32 crc kubenswrapper[4755]: I1124 02:21:32.138400 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8tl6n" event={"ID":"0fe33c34-d157-48cc-8853-2592600f4b37","Type":"ContainerDied","Data":"30974f3160052b221ad5dbb0f50cab5e345f92916463d648b74ebfb371b88de3"} Nov 24 02:21:32 crc kubenswrapper[4755]: I1124 02:21:32.138742 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8tl6n" event={"ID":"0fe33c34-d157-48cc-8853-2592600f4b37","Type":"ContainerDied","Data":"920bbf58ca8a1a2cdfb7f930ef357f14750235224db979b4ece01746aef8221a"} Nov 24 02:21:32 crc kubenswrapper[4755]: I1124 02:21:32.138422 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8tl6n" Nov 24 02:21:32 crc kubenswrapper[4755]: I1124 02:21:32.138780 4755 scope.go:117] "RemoveContainer" containerID="30974f3160052b221ad5dbb0f50cab5e345f92916463d648b74ebfb371b88de3" Nov 24 02:21:32 crc kubenswrapper[4755]: I1124 02:21:32.165135 4755 scope.go:117] "RemoveContainer" containerID="d808616b7b508e66f6219439b12e9eba541a160e4201f0e9ad4ef6dff01034de" Nov 24 02:21:32 crc kubenswrapper[4755]: I1124 02:21:32.172769 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8tl6n"] Nov 24 02:21:32 crc kubenswrapper[4755]: I1124 02:21:32.184354 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-8tl6n"] Nov 24 02:21:32 crc kubenswrapper[4755]: I1124 02:21:32.197573 4755 scope.go:117] "RemoveContainer" containerID="7b9da7870fe6ff9597ad2aab1f97e1a36476359dcf2369bf15ccbd929d233e87" Nov 24 02:21:32 crc kubenswrapper[4755]: I1124 02:21:32.238445 4755 scope.go:117] "RemoveContainer" containerID="30974f3160052b221ad5dbb0f50cab5e345f92916463d648b74ebfb371b88de3" Nov 24 02:21:32 crc kubenswrapper[4755]: E1124 02:21:32.240185 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"30974f3160052b221ad5dbb0f50cab5e345f92916463d648b74ebfb371b88de3\": container with ID starting with 30974f3160052b221ad5dbb0f50cab5e345f92916463d648b74ebfb371b88de3 not found: ID does not exist" containerID="30974f3160052b221ad5dbb0f50cab5e345f92916463d648b74ebfb371b88de3" Nov 24 02:21:32 crc kubenswrapper[4755]: I1124 02:21:32.240218 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"30974f3160052b221ad5dbb0f50cab5e345f92916463d648b74ebfb371b88de3"} err="failed to get container status \"30974f3160052b221ad5dbb0f50cab5e345f92916463d648b74ebfb371b88de3\": rpc error: code = NotFound desc = could not find container \"30974f3160052b221ad5dbb0f50cab5e345f92916463d648b74ebfb371b88de3\": container with ID starting with 30974f3160052b221ad5dbb0f50cab5e345f92916463d648b74ebfb371b88de3 not found: ID does not exist" Nov 24 02:21:32 crc kubenswrapper[4755]: I1124 02:21:32.240239 4755 scope.go:117] "RemoveContainer" containerID="d808616b7b508e66f6219439b12e9eba541a160e4201f0e9ad4ef6dff01034de" Nov 24 02:21:32 crc kubenswrapper[4755]: E1124 02:21:32.240942 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d808616b7b508e66f6219439b12e9eba541a160e4201f0e9ad4ef6dff01034de\": container with ID starting with d808616b7b508e66f6219439b12e9eba541a160e4201f0e9ad4ef6dff01034de not found: ID does not exist" containerID="d808616b7b508e66f6219439b12e9eba541a160e4201f0e9ad4ef6dff01034de" Nov 24 02:21:32 crc kubenswrapper[4755]: I1124 02:21:32.241009 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d808616b7b508e66f6219439b12e9eba541a160e4201f0e9ad4ef6dff01034de"} err="failed to get container status \"d808616b7b508e66f6219439b12e9eba541a160e4201f0e9ad4ef6dff01034de\": rpc error: code = NotFound desc = could not find container \"d808616b7b508e66f6219439b12e9eba541a160e4201f0e9ad4ef6dff01034de\": container with ID starting with d808616b7b508e66f6219439b12e9eba541a160e4201f0e9ad4ef6dff01034de not found: ID does not exist" Nov 24 02:21:32 crc kubenswrapper[4755]: I1124 02:21:32.241050 4755 scope.go:117] "RemoveContainer" 
containerID="7b9da7870fe6ff9597ad2aab1f97e1a36476359dcf2369bf15ccbd929d233e87" Nov 24 02:21:32 crc kubenswrapper[4755]: E1124 02:21:32.241424 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b9da7870fe6ff9597ad2aab1f97e1a36476359dcf2369bf15ccbd929d233e87\": container with ID starting with 7b9da7870fe6ff9597ad2aab1f97e1a36476359dcf2369bf15ccbd929d233e87 not found: ID does not exist" containerID="7b9da7870fe6ff9597ad2aab1f97e1a36476359dcf2369bf15ccbd929d233e87" Nov 24 02:21:32 crc kubenswrapper[4755]: I1124 02:21:32.241458 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b9da7870fe6ff9597ad2aab1f97e1a36476359dcf2369bf15ccbd929d233e87"} err="failed to get container status \"7b9da7870fe6ff9597ad2aab1f97e1a36476359dcf2369bf15ccbd929d233e87\": rpc error: code = NotFound desc = could not find container \"7b9da7870fe6ff9597ad2aab1f97e1a36476359dcf2369bf15ccbd929d233e87\": container with ID starting with 7b9da7870fe6ff9597ad2aab1f97e1a36476359dcf2369bf15ccbd929d233e87 not found: ID does not exist" Nov 24 02:21:33 crc kubenswrapper[4755]: I1124 02:21:33.296660 4755 patch_prober.go:28] interesting pod/machine-config-daemon-h8xzm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 02:21:33 crc kubenswrapper[4755]: I1124 02:21:33.297082 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 02:21:33 crc kubenswrapper[4755]: I1124 02:21:33.297142 4755 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" Nov 24 02:21:33 crc kubenswrapper[4755]: I1124 02:21:33.297862 4755 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"132aaca735d6247969fce4c7c62b56055bb2249fa4151064cefe83ffdb9514e8"} pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 24 02:21:33 crc kubenswrapper[4755]: I1124 02:21:33.297935 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" podUID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerName="machine-config-daemon" containerID="cri-o://132aaca735d6247969fce4c7c62b56055bb2249fa4151064cefe83ffdb9514e8" gracePeriod=600 Nov 24 02:21:33 crc kubenswrapper[4755]: I1124 02:21:33.416068 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-vcqdv" Nov 24 02:21:33 crc kubenswrapper[4755]: I1124 02:21:33.416188 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-vcqdv" Nov 24 02:21:33 crc kubenswrapper[4755]: I1124 02:21:33.471092 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-vcqdv" Nov 24 02:21:34 crc kubenswrapper[4755]: I1124 02:21:34.008808 4755 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0fe33c34-d157-48cc-8853-2592600f4b37" path="/var/lib/kubelet/pods/0fe33c34-d157-48cc-8853-2592600f4b37/volumes" Nov 24 02:21:34 crc kubenswrapper[4755]: I1124 02:21:34.166401 4755 generic.go:334] "Generic (PLEG): container finished" podID="b1962128-02a0-46c3-82c2-5055c2aed0b9" containerID="132aaca735d6247969fce4c7c62b56055bb2249fa4151064cefe83ffdb9514e8" exitCode=0 Nov 24 02:21:34 crc kubenswrapper[4755]: I1124 02:21:34.166763 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" event={"ID":"b1962128-02a0-46c3-82c2-5055c2aed0b9","Type":"ContainerDied","Data":"132aaca735d6247969fce4c7c62b56055bb2249fa4151064cefe83ffdb9514e8"} Nov 24 02:21:34 crc kubenswrapper[4755]: I1124 02:21:34.166796 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h8xzm" event={"ID":"b1962128-02a0-46c3-82c2-5055c2aed0b9","Type":"ContainerStarted","Data":"44d7f081f094d8d47f2d45ded230758a6649d70038e05c82041fde110efc5ec5"} Nov 24 02:21:34 crc kubenswrapper[4755]: I1124 02:21:34.166816 4755 scope.go:117] "RemoveContainer" containerID="fff609c366ec815c32e40db1bbafb7f7be4ae091a2824075c9d486c88017b349" Nov 24 02:21:34 crc kubenswrapper[4755]: I1124 02:21:34.233795 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-vcqdv" Nov 24 02:21:35 crc kubenswrapper[4755]: I1124 02:21:35.219048 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vcqdv"] Nov 24 02:21:37 crc kubenswrapper[4755]: I1124 02:21:37.206806 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-vcqdv" podUID="229e04af-3144-4594-978b-46e7e77be937" containerName="registry-server" containerID="cri-o://a140dd9ee9107cc62b4fb1972a083746a10c3258be0c396f5a7cf4bd4d42cda0" gracePeriod=2 Nov 24 02:21:38 crc kubenswrapper[4755]: I1124 02:21:38.218688 4755 generic.go:334] "Generic (PLEG): container finished" podID="229e04af-3144-4594-978b-46e7e77be937" containerID="a140dd9ee9107cc62b4fb1972a083746a10c3258be0c396f5a7cf4bd4d42cda0" exitCode=0 Nov 24 02:21:38 crc kubenswrapper[4755]: I1124 02:21:38.218772 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vcqdv" event={"ID":"229e04af-3144-4594-978b-46e7e77be937","Type":"ContainerDied","Data":"a140dd9ee9107cc62b4fb1972a083746a10c3258be0c396f5a7cf4bd4d42cda0"} Nov 24 02:21:38 crc kubenswrapper[4755]: I1124 02:21:38.338548 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vcqdv" Nov 24 02:21:38 crc kubenswrapper[4755]: I1124 02:21:38.498857 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/229e04af-3144-4594-978b-46e7e77be937-utilities\") pod \"229e04af-3144-4594-978b-46e7e77be937\" (UID: \"229e04af-3144-4594-978b-46e7e77be937\") " Nov 24 02:21:38 crc kubenswrapper[4755]: I1124 02:21:38.498995 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kl9z6\" (UniqueName: \"kubernetes.io/projected/229e04af-3144-4594-978b-46e7e77be937-kube-api-access-kl9z6\") pod \"229e04af-3144-4594-978b-46e7e77be937\" (UID: \"229e04af-3144-4594-978b-46e7e77be937\") " Nov 24 02:21:38 crc kubenswrapper[4755]: I1124 02:21:38.499220 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/229e04af-3144-4594-978b-46e7e77be937-catalog-content\") pod \"229e04af-3144-4594-978b-46e7e77be937\" (UID: \"229e04af-3144-4594-978b-46e7e77be937\") " Nov 24 02:21:38 crc kubenswrapper[4755]: I1124 02:21:38.499919 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/229e04af-3144-4594-978b-46e7e77be937-utilities" (OuterVolumeSpecName: "utilities") pod "229e04af-3144-4594-978b-46e7e77be937" (UID: "229e04af-3144-4594-978b-46e7e77be937"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 02:21:38 crc kubenswrapper[4755]: I1124 02:21:38.507845 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/229e04af-3144-4594-978b-46e7e77be937-kube-api-access-kl9z6" (OuterVolumeSpecName: "kube-api-access-kl9z6") pod "229e04af-3144-4594-978b-46e7e77be937" (UID: "229e04af-3144-4594-978b-46e7e77be937"). InnerVolumeSpecName "kube-api-access-kl9z6". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 02:21:38 crc kubenswrapper[4755]: I1124 02:21:38.601291 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/229e04af-3144-4594-978b-46e7e77be937-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 02:21:38 crc kubenswrapper[4755]: I1124 02:21:38.601329 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kl9z6\" (UniqueName: \"kubernetes.io/projected/229e04af-3144-4594-978b-46e7e77be937-kube-api-access-kl9z6\") on node \"crc\" DevicePath \"\"" Nov 24 02:21:38 crc kubenswrapper[4755]: I1124 02:21:38.610882 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/229e04af-3144-4594-978b-46e7e77be937-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "229e04af-3144-4594-978b-46e7e77be937" (UID: "229e04af-3144-4594-978b-46e7e77be937"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 02:21:38 crc kubenswrapper[4755]: I1124 02:21:38.702586 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/229e04af-3144-4594-978b-46e7e77be937-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 02:21:39 crc kubenswrapper[4755]: I1124 02:21:39.235166 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vcqdv" event={"ID":"229e04af-3144-4594-978b-46e7e77be937","Type":"ContainerDied","Data":"8e0b524579784a9c190579f0a6d83072eadcd5048659203cd625151c42d536bb"} Nov 24 02:21:39 crc kubenswrapper[4755]: I1124 02:21:39.235499 4755 scope.go:117] "RemoveContainer" containerID="a140dd9ee9107cc62b4fb1972a083746a10c3258be0c396f5a7cf4bd4d42cda0" Nov 24 02:21:39 crc kubenswrapper[4755]: I1124 02:21:39.235234 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vcqdv" Nov 24 02:21:39 crc kubenswrapper[4755]: I1124 02:21:39.272701 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vcqdv"] Nov 24 02:21:39 crc kubenswrapper[4755]: I1124 02:21:39.279383 4755 scope.go:117] "RemoveContainer" containerID="c87f40aeaf26a9577992dc49f5a539807e622a3ac10f2489afb3efb6e25ee418" Nov 24 02:21:39 crc kubenswrapper[4755]: I1124 02:21:39.285501 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-vcqdv"] Nov 24 02:21:39 crc kubenswrapper[4755]: I1124 02:21:39.325569 4755 scope.go:117] "RemoveContainer" containerID="2f242590fa284dfa42d2d00679c0c052e805f9c729825760ef6acbce5d8c9aa8" Nov 24 02:21:40 crc kubenswrapper[4755]: I1124 02:21:40.008324 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="229e04af-3144-4594-978b-46e7e77be937" path="/var/lib/kubelet/pods/229e04af-3144-4594-978b-46e7e77be937/volumes" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515110740611024441 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015110740611017356 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015110730122016475 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015110730123015446 5ustar corecore